diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java
index 8acb674b509..e2f1fae3398 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java
@@ -172,7 +172,7 @@ public class MetaReader {
/**
* Callers should call close on the returned {@link HTable} instance.
* @param catalogTracker
- * @return
+ * @return An {@link HTable} for hbase:meta
* @throws IOException
*/
static HTable getCatalogHTable(final CatalogTracker catalogTracker)
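Note: the bare "@return" tags filled in or dropped throughout this patch are the ones javadoc flags as having no description. A minimal sketch of the corrected pattern, using a hypothetical class rather than anything from the patch:

import java.io.IOException;

public class ReturnTagExample {
  /**
   * Callers should call close on the returned instance.
   * @return a handle on the catalog table; a bare "@return" with no text here
   *         is what draws the javadoc warning
   * @throws IOException if the table cannot be reached
   */
  Object getCatalogTable() throws IOException {
    return new Object(); // placeholder body for the sketch
  }
}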
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
index 225a8e48584..366d1760bcd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
@@ -536,7 +536,7 @@ public class AggregationClient {
* std.
* @param table
* @param scan
- * @return
+ * @return standard deviations
* @throws Throwable
*/
private
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
index 03a9e817c4e..380baa8f686 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java
@@ -158,7 +158,6 @@ public class Tag {
/**
* Returns the total length of the entire tag entity
- * @return
*/
short getLength() {
return this.length;
@@ -166,7 +165,6 @@ public class Tag {
/**
* Returns the offset of the entire tag entity
- * @return
*/
int getOffset() {
return this.offset;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
index 042c606cecb..b6876864ca0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
@@ -77,7 +77,6 @@ public final class Compression {
/**
* Returns the classloader to load the Codec class from.
- * @return
*/
private static ClassLoader getClassLoaderForCodec() {
ClassLoader cl = Thread.currentThread().getContextClassLoader();
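Note: the method this hunk touches resolves codec classes with the usual classloader fallback chain. A self-contained sketch of that idiom (CodecLoader is a hypothetical stand-in for the enclosing class):

public final class CodecLoader {
  static ClassLoader classLoaderForCodec() {
    // Prefer the thread's context classloader, which can see user-supplied jars.
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    if (cl == null) {
      // Fall back to the loader that loaded this class.
      cl = CodecLoader.class.getClassLoader();
    }
    if (cl == null) {
      // Last resort: the system classloader.
      cl = ClassLoader.getSystemClassLoader();
    }
    return cl;
  }
}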
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
index 0bc20c8614c..82f8f815c99 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
@@ -825,7 +825,7 @@ public class OrderedBytes {
* a value in Numeric encoding and is within the valid range of
* {@link BigDecimal} values. {@link BigDecimal} does not support {@code NaN}
* or {@code Infinite} values.
- * @see #decodeNumericAsDouble(byte[], int)
+ * @see #decodeNumericAsDouble(PositionedByteRange)
*/
private static BigDecimal decodeNumericValue(PositionedByteRange src) {
final int e;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
index 076174d12c6..79115be6fed 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
@@ -25,12 +25,9 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Server;
-import org.cloudera.htrace.Sampler;
import org.cloudera.htrace.Span;
import org.cloudera.htrace.Trace;
import org.cloudera.htrace.TraceScope;
-import org.cloudera.htrace.impl.AlwaysSampler;
-
/**
* Abstract base class for all HBase event handlers. Subclasses should
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
index 5ada896b7a7..211a7704208 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
@@ -206,11 +206,10 @@ public class Reference {
}
/**
- * Use this instead of {@link #toByteArray()} when writing to a stream and you want to use
- * the pb mergeDelimitedFrom (w/o the delimiter, pb reads to EOF which may not be what ou want).
+ * Use this when writing to a stream and you want to use the pb mergeDelimitedFrom
+ * (w/o the delimiter, pb reads to EOF which may not be what you want).
* @return This instance serialized as a delimited protobuf w/ a magic pb prefix.
* @throws IOException
- * @see {@link #toByteArray()}
*/
byte [] toByteArray() throws IOException {
return ProtobufUtil.prependPBMagic(convert().toByteArray());
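Note: the concern this javadoc records is that a plain protobuf parse reads to EOF, so a message embedded in a longer stream needs a length delimiter (the magic prefix additionally marks the serialization format). The idea, sketched with plain streams and hypothetical helper names, no protobuf dependency:

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class DelimitedBlobSketch {
  // Write the payload with a length prefix so a reader knows where it ends.
  static void writeDelimited(DataOutputStream out, byte[] payload) throws IOException {
    out.writeInt(payload.length); // stands in for protobuf's varint delimiter
    out.write(payload);
  }

  // Read exactly one payload back, leaving any trailing stream data untouched.
  static byte[] readDelimited(DataInputStream in) throws IOException {
    byte[] payload = new byte[in.readInt()];
    in.readFully(payload);
    return payload;
  }
}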
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
index 345345ae567..255339016da 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
@@ -729,7 +729,7 @@ public class HFile {
* We write it as a protobuf.
* @param out
* @throws IOException
- * @see {@link #read(DataInputStream)}
+ * @see #read(DataInputStream)
*/
void write(final DataOutputStream out) throws IOException {
HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();
@@ -748,7 +748,7 @@ public class HFile {
* Can deserialize protobuf of old Writables format.
* @param in
* @throws IOException
- * @see {@link #write(DataOutputStream)}
+ * @see #write(DataOutputStream)
*/
void read(final DataInputStream in) throws IOException {
// This code is tested over in TestHFileReaderV1 where we read an old hfile w/ this new code.
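Note: "@see {@link ...}", as on the lines removed above, is redundant and trips a javadoc warning; "@see" already takes a bare reference. The corrected pairing in miniature (hypothetical class, not HBase code):

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class SeeTagExample {
  /**
   * Writes this object's state.
   * @see #read(DataInputStream)
   */
  void write(DataOutputStream out) throws IOException {
    out.writeInt(0); // trivial body for the sketch
  }

  /**
   * Reads state written by write.
   * @see #write(DataOutputStream)
   */
  void read(DataInputStream in) throws IOException {
    in.readInt(); // trivial body for the sketch
  }
}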
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 3bc1edb17fa..e29c94b94bd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -244,7 +244,7 @@ public class BucketCache implements BlockCache, HeapSize {
* Get the IOEngine from the IO engine name
* @param ioEngineName
* @param capacity
- * @return
+ * @return the IOEngine
* @throws IOException
*/
private IOEngine getIOEngineFromName(String ioEngineName, long capacity)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/migration/UpgradeTo96.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/migration/UpgradeTo96.java
index 350c98db74b..46f36106914 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/migration/UpgradeTo96.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/migration/UpgradeTo96.java
@@ -206,7 +206,6 @@ public class UpgradeTo96 extends Configured implements Tool {
* Upgrading Znodes
* Log splitting
*
- * @return
* @throws Exception
*/
private int executeUpgrade() throws Exception {
@@ -230,7 +229,6 @@ public class UpgradeTo96 extends Configured implements Tool {
/**
* Performs log splitting for all regionserver directories.
- * @return
* @throws Exception
*/
private void doOfflineLogSplitting() throws Exception {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java
index 7658b608583..6744167527b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java
@@ -222,7 +222,7 @@ public class ProcedureCoordinator {
* @param procName
* @param procArgs
* @param expectedMembers
- * @return
+ * @return the newly created procedure
*/
Procedure createProcedure(ForeignExceptionDispatcher fed, String procName, byte[] procArgs,
List<String> expectedMembers) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index fc53c9a76d4..487fb089e8c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import javax.management.ObjectName;
-
import java.io.IOException;
import java.lang.Thread.UncaughtExceptionHandler;
import java.lang.annotation.Retention;
@@ -38,7 +36,6 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import java.util.UUID;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;
@@ -50,6 +47,8 @@ import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
+import javax.management.ObjectName;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -65,7 +64,6 @@ import org.apache.hadoop.hbase.ClockOutOfSyncException;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HealthCheckChore;
@@ -90,7 +88,6 @@ import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@@ -120,7 +117,6 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest;
@@ -138,7 +134,6 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRespon
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState;
@@ -152,6 +147,7 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse;
+import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
@@ -185,7 +181,6 @@ import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.Regio
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService;
import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest;
-import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey;
import org.apache.hadoop.hbase.regionserver.HRegion.Operation;
import org.apache.hadoop.hbase.regionserver.Leases.LeaseStillHeldException;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress;
@@ -4407,7 +4402,6 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa
/**
* Return the last failed RS name under /hbase/recovering-regions/encodedRegionName
* @param encodedRegionName
- * @return
* @throws IOException
* @throws KeeperException
*/
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
index 734140c0ca7..ea1fc20b4a2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
@@ -135,7 +135,7 @@ public class MemStore implements HeapSize {
/**
* Creates a snapshot of the current memstore.
- * Snapshot must be cleared by call to {@link #clearSnapshot(SortedSet<KeyValue>)}
+ * Snapshot must be cleared by call to {@link #clearSnapshot(SortedSet)}
* To get the snapshot made by this method, use {@link #getSnapshot()}
*/
void snapshot() {
@@ -172,8 +172,8 @@ public class MemStore implements HeapSize {
* Called by flusher to get current snapshot made by a previous
* call to {@link #snapshot()}
* @return Return snapshot.
- * @see {@link #snapshot()}
- * @see {@link #clearSnapshot(SortedSet)}
+ * @see #snapshot()
+ * @see #clearSnapshot(SortedSet)
*/
KeyValueSkipListSet getSnapshot() {
return this.snapshot;
@@ -183,7 +183,7 @@ public class MemStore implements HeapSize {
* The passed snapshot was successfully persisted; it can be let go.
* @param ss The snapshot to clean out.
* @throws UnexpectedException
- * @see {@link #snapshot()}
+ * @see #snapshot()
*/
void clearSnapshot(final SortedSet<KeyValue> ss)
throws UnexpectedException {
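Note: the three methods touched in this file form one flush-time contract: snapshot() sets the current set aside, getSnapshot() hands it to the flusher, and clearSnapshot(ss) releases it once persisted. A toy model of that contract (plain collections, not HBase code):

import java.util.SortedSet;
import java.util.TreeSet;

public class SnapshotContractSketch {
  private SortedSet<String> active = new TreeSet<String>();
  private SortedSet<String> snapshot = new TreeSet<String>();

  // Move current contents aside; a still-unflushed snapshot is left alone.
  void snapshot() {
    if (snapshot.isEmpty()) {
      snapshot = active;
      active = new TreeSet<String>();
    }
  }

  // The flusher reads what snapshot() set aside.
  SortedSet<String> getSnapshot() {
    return snapshot;
  }

  // Once persisted, the snapshot can be let go.
  void clearSnapshot(SortedSet<String> ss) {
    if (snapshot == ss) {
      snapshot = new TreeSet<String>();
    }
  }
}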
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
index a02c38d0601..2f5d23d3fd1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
@@ -56,14 +56,14 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.ConnectionUtils;
import org.apache.hadoop.hbase.client.Delete;
@@ -71,7 +71,6 @@ import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.exceptions.RegionOpeningException;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.master.SplitLogManager;
@@ -79,12 +78,12 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionStoreSequenceIds;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.StoreSequenceId;
@@ -648,7 +647,6 @@ public class HLogSplitter {
/**
* Get current open writers
- * @return
*/
private int getNumOpenWriters() {
int result = 0;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 9fc06e65c08..a77b88d7718 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -202,7 +202,7 @@ public class AccessController extends BaseRegionObserver
* @param e the coprocessor environment
* @param families the map of column families to qualifiers present in
* the request
- * @return
+ * @return an authorization result
*/
AuthResult permissionGranted(String request, User user, Permission.Action permRequest,
RegionCoprocessorEnvironment e,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
index 971253b49a5..0e3f44fc56e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
@@ -276,7 +276,7 @@ public class TableAuthManager {
* Authorizes a global permission
* @param perms
* @param action
- * @return
+ * @return true if authorized, false otherwise
*/
private boolean authorize(List<TablePermission> perms, Permission.Action action) {
if (perms != null) {
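Note: the method under this hunk is a linear scan over cached permissions, granting if any one of them implies the requested action. In outline, with a toy interface standing in for HBase's permission classes:

import java.util.List;

public class AuthorizeSketch {
  interface Perm {
    boolean implies(String action);
  }

  // Grant if any cached permission implies the requested action.
  static boolean authorize(List<Perm> perms, String action) {
    if (perms != null) {
      for (Perm p : perms) {
        if (p.implies(action)) {
          return true;
        }
      }
    }
    return false;
  }
}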
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 031713c6180..5c0de0a018e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -858,7 +858,7 @@ public class HBaseFsck extends Configured implements Tool {
* To get the column family list according to the column family dirs
* @param columns
* @param hbi
- * @return
+ * @return a set of column families
* @throws IOException
*/
private Set<String> getColumnFamilyList(Set<String> columns, HbckInfo hbi) throws IOException {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HFileV1Detector.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HFileV1Detector.java
index e74d6ca1b21..51bd117a69f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HFileV1Detector.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HFileV1Detector.java
@@ -250,7 +250,6 @@ public class HFileV1Detector extends Configured implements Tool {
/**
* Ignore ROOT table as it doesn't exist in 0.96.
* @param path
- * @return
*/
private boolean isRootTable(Path path) {
if (path != null && path.toString().endsWith("-ROOT-")) return true;
@@ -385,7 +384,6 @@ public class HFileV1Detector extends Configured implements Tool {
/**
* Removes the prefix of defaultNamespace from the path.
* @param originalPath
- * @return
*/
private String removeDefaultNSPath(Path originalPath) {
String pathStr = originalPath.toString();
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 6b6278b8117..c562924ea0c 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -433,7 +433,6 @@ public class ThriftServerRunner implements Runnable {
* Returns a list of all the column families for a given htable.
*
* @param table
- * @return
* @throws IOException
*/
byte[][] getAllColumns(HTable table) throws IOException {