diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index f5de54183ef..980df1c2123 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -543,11 +543,11 @@ public final class HConstants { /** * Parameter name for unique identifier for this {@link org.apache.hadoop.conf.Configuration} * instance. If there are two or more {@link org.apache.hadoop.conf.Configuration} instances that, - * for all intents and purposes, are the same except for their instance ids, - * then they will not be able to share the same {@link org.apache.hadoop.hbase.client.HConnection} instance. - * On the other hand, even if the instance ids are the same, it could result - * in non-shared {@link org.apache.hadoop.hbase.client.HConnection} - * instances if some of the other connection parameters differ. + * for all intents and purposes, are the same except for their instance ids, then they will not be + * able to share the same org.apache.hadoop.hbase.client.HConnection instance. On the other hand, + * even if the instance ids are the same, it could result in non-shared + * org.apache.hadoop.hbase.client.HConnection instances if some of the other connection parameters + * differ. */ public static String HBASE_CLIENT_INSTANCE_ID = "hbase.client.instance.id"; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index b92c32c16f6..2a60eb74af6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -41,30 +41,25 @@ import org.apache.hadoop.io.Writable; import com.google.common.primitives.Longs; /** - * An HBase Key/Value. This is the fundamental HBase Type. - * - *

If being used client-side, the primary methods to access individual fields - * are {@link #getRow()}, {@link #getFamily()}, {@link #getQualifier()}, - * {@link #getTimestamp()}, and {@link #getValue()}. These methods allocate new - * byte arrays and return copies. Avoid their use server-side. - * - *

Instances of this class are immutable. They do not implement Comparable - * but Comparators are provided. Comparators change with context, - * whether user table or a catalog table comparison. Its critical you use the - * appropriate comparator. There are Comparators for KeyValue instances and - * then for just the Key portion of a KeyValue used mostly by {@link HFile}. - * - *

KeyValue wraps a byte array and takes offsets and lengths into passed - * array at where to start interpreting the content as KeyValue. The KeyValue - * format inside a byte array is: - * <keylength> <valuelength> <key> <value> - * Key is further decomposed as: + * An HBase Key/Value. This is the fundamental HBase Type. + *

+ * If being used client-side, the primary methods to access individual fields are {@link #getRow()}, + * {@link #getFamily()}, {@link #getQualifier()}, {@link #getTimestamp()}, and {@link #getValue()}. + * These methods allocate new byte arrays and return copies. Avoid their use server-side. + *

+ * Instances of this class are immutable. They do not implement Comparable but Comparators are + * provided. Comparators change with context, whether user table or a catalog table comparison. It's + * critical you use the appropriate comparator. There are Comparators for KeyValue instances and + * then for just the Key portion of a KeyValue used mostly by HFile. + *

+ * KeyValue wraps a byte array and takes offsets and lengths into passed array at where to start + * interpreting the content as KeyValue. The KeyValue format inside a byte array is: + * <keylength> <valuelength> <key> <value> Key is further decomposed as: * <rowlength> <row> <columnfamilylength> <columnfamily> <columnqualifier> <timestamp> <keytype> - * The rowlength maximum is Short.MAX_SIZE, - * column family length maximum is - * Byte.MAX_SIZE, and column qualifier + key length must - * be < Integer.MAX_SIZE. - * The column does not contain the family/qualifier delimiter, {@link #COLUMN_FAMILY_DELIMITER} + * The rowlength maximum is Short.MAX_SIZE, column family length maximum + * is Byte.MAX_SIZE, and column qualifier + key length must be < + * Integer.MAX_SIZE. The column does not contain the family/qualifier delimiter, + * {@link #COLUMN_FAMILY_DELIMITER} */ @InterfaceAudience.Public @InterfaceStability.Evolving diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java index 423db17e0b3..135d25c9acc 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java @@ -29,9 +29,8 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; /** - * Various types of {@link HFile} blocks. Ordinal values of these enum constants - * must not be relied upon. The values in the enum appear in the order they - * appear in a version 2 {@link HFile}. + * Various types of HFile blocks. Ordinal values of these enum constants must not be relied upon. + * The values in the enum appear in the order they appear in a version 2 HFile. 
*/ @InterfaceAudience.Private public enum BlockType { diff --git a/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/metrics/BaseMetricsSourceImpl.java b/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/metrics/BaseMetricsSourceImpl.java index 398d9a34c38..0943370b0d6 100644 --- a/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/metrics/BaseMetricsSourceImpl.java +++ b/hbase-hadoop1-compat/src/main/java/org/apache/hadoop/hbase/metrics/BaseMetricsSourceImpl.java @@ -167,7 +167,7 @@ public class BaseMetricsSourceImpl implements BaseMetricsSource, MetricsSource { /** * Used to get at the DynamicMetricsRegistry. - * @return + * @return DynamicMetricsRegistry */ protected DynamicMetricsRegistry getMetricsRegistry() { return metricsRegistry; diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java index fbd68c61746..a4238931d81 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java @@ -414,7 +414,6 @@ public class DynamicMetricsRegistry { * * @param gaugeName name of the gauge to create or get. * @param potentialStartingValue value of the new gauge if we have to create it. - * @return */ public MutableGaugeLong getLongGauge(String gaugeName, long potentialStartingValue) { //Try and get the guage. @@ -450,7 +449,6 @@ public class DynamicMetricsRegistry { * * @param counterName Name of the counter to get * @param potentialStartingValue starting value if we have to create a new counter - * @return */ public MutableCounterLong getLongCounter(String counterName, long potentialStartingValue) { //See getLongGauge for description on how this works. 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ClusterId.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ClusterId.java index a2b40c4c915..a8e8560b218 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ClusterId.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ClusterId.java @@ -58,7 +58,7 @@ public class ClusterId { * @param bytes A pb serialized {@link ClusterId} instance with pb magic prefix * @return An instance of {@link ClusterId} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray() */ public static ClusterId parseFrom(final byte [] bytes) throws DeserializationException { if (ProtobufUtil.isPBMagicPrefix(bytes)) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseException.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseException.java index 60444f4d0b5..28fe3376038 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseException.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseException.java @@ -21,7 +21,7 @@ import org.apache.hadoop.classification.InterfaceAudience; /** * Base checked exception in HBase. 
- * @see https://issues.apache.org/jira/browse/HBASE-5796 + * @see HBASE-5796 */ @SuppressWarnings("serial") @InterfaceAudience.Private diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java index b0f3601ae99..193dc911e36 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java @@ -35,23 +35,14 @@ public class HBaseIOException extends IOException { super(); } - /** - * {@inheritDoc} - */ public HBaseIOException(String message) { super(message); } - /** - * {@inheritDoc} - **/ public HBaseIOException(String message, Throwable cause) { super(message, cause); } - /** - * {@inheritDoc} - */ public HBaseIOException(Throwable cause) { super(cause); }} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java index 1685d97cef4..95684965f53 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java @@ -1086,7 +1086,7 @@ public class HColumnDescriptor implements WritableComparable /** * @return This instance serialized with pb with pb magic prefix - * @see {@link #parseFrom(byte[])} + * @see #parseFrom(byte[]) */ public byte [] toByteArray() { return ProtobufUtil.prependPBMagic(convert().toByteArray()); @@ -1096,7 +1096,7 @@ public class HColumnDescriptor implements WritableComparable * @param bytes A pb serialized {@link HColumnDescriptor} instance with pb magic prefix * @return An instance of {@link HColumnDescriptor} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray() */ public static HColumnDescriptor parseFrom(final byte [] bytes) throws DeserializationException { if 
(!ProtobufUtil.isPBMagicPrefix(bytes)) throw new DeserializationException("No magic"); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java index fe718f69311..6b795ed91b6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java @@ -731,8 +731,8 @@ public class HRegionInfo implements Comparable { } /** - * @deprecated Use protobuf deserialization instead. See {@link #parseFrom(byte[])} and - * {@link #parseFrom(FSDataInputStream)} + * @deprecated Use protobuf deserialization instead. + * @see #parseFrom(byte[]) */ @Deprecated public void readFields(DataInput in) throws IOException { @@ -914,7 +914,7 @@ public class HRegionInfo implements Comparable { /** * @return This instance serialized as protobuf w/ a magic pb prefix. - * @see #parseFrom(byte[]); + * @see #parseFrom(byte[]) */ public byte [] toByteArray() { byte [] bytes = convert().toByteArray(); @@ -924,7 +924,7 @@ public class HRegionInfo implements Comparable { /** * @param bytes * @return A deserialized {@link HRegionInfo} or null if we failed deserialize or passed bytes null - * @see {@link #toByteArray()} + * @see #toByteArray() */ public static HRegionInfo parseFromOrNull(final byte [] bytes) { if (bytes == null || bytes.length <= 0) return null; @@ -939,7 +939,7 @@ public class HRegionInfo implements Comparable { * @param bytes A pb RegionInfo serialized with a pb magic prefix. * @return A deserialized {@link HRegionInfo} * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray() */ public static HRegionInfo parseFrom(final byte [] bytes) throws DeserializationException { if (ProtobufUtil.isPBMagicPrefix(bytes)) { @@ -967,7 +967,7 @@ public class HRegionInfo implements Comparable { * the pb mergeDelimitedFrom (w/o the delimiter, pb reads to EOF which may not be what you want). 
* @return This instance serialized as a delimited protobuf w/ a magic pb prefix. * @throws IOException - * @see {@link #toByteArray()} + * @see #toByteArray() */ public byte [] toDelimitedByteArray() throws IOException { return ProtobufUtil.toDelimitedByteArray(convert()); @@ -1096,7 +1096,7 @@ public class HRegionInfo implements Comparable { * @param infos HRegionInfo objects to serialize * @return This instance serialized as a delimited protobuf w/ a magic pb prefix. * @throws IOException - * @see {@link #toByteArray()} + * @see #toByteArray() */ public static byte[] toDelimitedByteArray(HRegionInfo... infos) throws IOException { byte[][] bytes = new byte[infos.length][]; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HServerInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HServerInfo.java index cb2d9c3a2e3..5d043f9d2d8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HServerInfo.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HServerInfo.java @@ -33,11 +33,11 @@ import org.apache.hadoop.io.WritableComparable; * {@link HServerAddress}, its webui port, and its server startcode. It was * used to pass meta info about a server across an RPC but we've since made * it so regionserver info is up in ZooKeeper and so this class is on its - * way out. It used to carry {@link HServerLoad} but as off HBase 0.92.0, the + * way out. It used to carry HServerLoad but as off HBase 0.92.0, the * HServerLoad is passed independent of this class. Also, we now no longer pass * the webui from regionserver to master (TODO: Fix). 
* @deprecated Use {@link InetSocketAddress} and or {@link ServerName} and or - * {@link HServerLoad} + * HServerLoad */ public class HServerInfo extends VersionedWritable implements WritableComparable { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java index 8a8f655edce..fc5c1828a62 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java @@ -899,7 +899,7 @@ public class HTableDescriptor implements WritableComparable { /** * INTERNAL This method is a part of {@link WritableComparable} interface * and is used for serialization of the HTableDescriptor over RPC - * @deprecated Writables are going away. Use pb {@link #toByteArray()(byte[])} instead. + * @deprecated Writables are going away. Use pb {@link #toByteArray(byte[])} instead. */ @Deprecated @Override @@ -1231,7 +1231,7 @@ public class HTableDescriptor implements WritableComparable { /** * @return This instance serialized with pb with pb magic prefix - * @see {@link #parseFrom(byte[])} + * @see #parseFrom(byte[]) */ public byte [] toByteArray() { return ProtobufUtil.prependPBMagic(convert().toByteArray()); @@ -1242,7 +1242,7 @@ public class HTableDescriptor implements WritableComparable { * @return An instance of {@link HTableDescriptor} made from bytes * @throws DeserializationException * @throws IOException - * @see {@link #toByteArray()} + * @see #toByteArray() */ public static HTableDescriptor parseFrom(final byte [] bytes) throws DeserializationException, IOException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/MasterAdminProtocol.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/MasterAdminProtocol.java index d4952953b41..cba99e2f7b7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/MasterAdminProtocol.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/MasterAdminProtocol.java @@ -153,7 +153,7 @@ public interface MasterAdminProtocol extends /** * Unassign a region from current hosting regionserver. Region will then be * assigned to a regionserver chosen at random. Region could be reassigned - * back to the same server. Use {@link #moveRegion(RpcController,MoveRegionRequest} + * back to the same server. Use {@link #moveRegion(RpcController,MoveRegionRequest)} * if you want to control the region movement. * @param controller Unused (set to null). * @param req The request that contains:
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/RegionTransition.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/RegionTransition.java index ccc22fd47e6..8d32fca76b8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/RegionTransition.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/RegionTransition.java @@ -31,7 +31,7 @@ import com.google.protobuf.InvalidProtocolBufferException; * Current state of a region in transition. Holds state of a region as it moves through the * steps that take it from offline to open, etc. Used by regionserver, master, and zk packages. * Encapsulates protobuf serialization/deserialization so we don't leak generated pb outside this - * class. Create an instance using {@link #createRegionTransition(EventType, byte[], ServerName)}. + * class. Create an instance using createRegionTransition(EventType, byte[], ServerName). *

Immutable */ @InterfaceAudience.Private @@ -85,7 +85,6 @@ public class RegionTransition { * @param regionName * @param sn * @return a serialized pb {@link RegionTransition} - * @see #parseRegionTransition(byte[]) */ public static RegionTransition createRegionTransition(final EventType type, final byte [] regionName, final ServerName sn) { @@ -98,7 +97,6 @@ public class RegionTransition { * @param sn * @param payload May be null * @return a serialized pb {@link RegionTransition} - * @see #parseRegionTransition(byte[]) */ public static RegionTransition createRegionTransition(final EventType type, final byte [] regionName, final ServerName sn, final byte [] payload) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index 34c380c032a..3d3bd39bdad 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -458,7 +458,7 @@ public class HBaseAdmin implements Abortable, Closeable { /** * Creates a new table but does not block and wait for it to come online. * Asynchronous operation. To check if the table exists, use - * {@link: #isTableAvailable} -- it is not safe to create an HTable + * {@link: #isTableAvailable()} -- it is not safe to create an HTable * instance to this table before it is available. * Note : Avoid passing empty split key. 
* @param desc table descriptor for table diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnection.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnection.java index fb438e0884b..17e12cd6a8b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnection.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnection.java @@ -252,7 +252,6 @@ public interface HConnection extends Abortable, Closeable { * @return an object of type T * @throws IOException if a remote or network exception occurs * @throws RuntimeException other unspecified error - * @deprecated Use {@link HConnectionManager#withoutRetries(ServerCallable)} */ @Deprecated public T getRegionServerWithRetries(ServerCallable callable) @@ -266,7 +265,6 @@ public interface HConnection extends Abortable, Closeable { * @return an object of type T * @throws IOException if a remote or network exception occurs * @throws RuntimeException other unspecified error - * @deprecated Use {@link HConnectionManager#withoutRetries(ServerCallable)} */ @Deprecated public T getRegionServerWithoutRetries(ServerCallable callable)