From eea9873ceff60381d50799994e260e8319ee68a7 Mon Sep 17 00:00:00 2001 From: stack Date: Sat, 7 Feb 2015 17:05:11 -0800 Subject: [PATCH] HBASE-12985 Javadoc warning and findbugs fixes to get us green again --- .../org/apache/hadoop/hbase/thrift/HttpDoAsClient.java | 6 ------ .../java/org/apache/hadoop/hbase/SplitLogCounters.java | 2 +- .../hadoop/hbase/client/ClientSideRegionScanner.java | 5 +---- .../java/org/apache/hadoop/hbase/ipc/RpcServer.java | 5 +++-- .../hadoop/hbase/master/balancer/BaseLoadBalancer.java | 6 ++---- .../hbase/procedure/flush/FlushTableSubprocedure.java | 2 +- .../hadoop/hbase/regionserver/HRegionServer.java | 2 +- .../hadoop/hbase/regionserver/wal/WALCellCodec.java | 10 +++++----- .../hadoop/hbase/security/token/ZKSecretWatcher.java | 2 +- 9 files changed, 15 insertions(+), 25 deletions(-) diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java index 9da79acb76c..9ef1bd28df9 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/thrift/HttpDoAsClient.java @@ -248,12 +248,6 @@ public class HttpDoAsClient { System.out.println("row: " + utf8(rowResult.row.array()) + ", cols: " + rowStr); } - private void printRow(List rows) { - for (TRowResult rowResult : rows) { - printRow(rowResult); - } - } - static Subject getSubject() throws Exception { if (!secure) return new Subject(); /* diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogCounters.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogCounters.java index f1a8c59d36c..6af5045a12c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogCounters.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/SplitLogCounters.java @@ -86,7 +86,7 @@ public class SplitLogCounters { public final static AtomicLong tot_wkr_task_grabing = 
new AtomicLong(0); public static void resetCounters() throws Exception { - Class cl = (new SplitLogCounters()).getClass(); + Class cl = SplitLogCounters.class; for (Field fld : cl.getDeclaredFields()) { if (!fld.isSynthetic()) ((AtomicLong)fld.get(null)).set(0); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java index ff34460d621..2bab21b7acb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ClientSideRegionScanner.java @@ -43,16 +43,13 @@ import org.mortbay.log.Log; public class ClientSideRegionScanner extends AbstractClientScanner { private HRegion region; - private Scan scan; RegionScanner scanner; List values; public ClientSideRegionScanner(Configuration conf, FileSystem fs, - Path rootDir, HTableDescriptor htd, HRegionInfo hri, Scan scan, ScanMetrics scanMetrics) + Path rootDir, HTableDescriptor htd, HRegionInfo hri, Scan scan, ScanMetrics scanMetrics) throws IOException { - this.scan = scan; - // region is immutable, set isolation level scan.setIsolationLevel(IsolationLevel.READ_UNCOMMITTED); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index b469facbc31..fac1ac964fa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -1874,8 +1874,9 @@ public class RpcServer implements RpcServerInterface { * instance else pass null for no authentication check. * @param name Used keying this rpc servers' metrics and for naming the Listener thread. * @param services A list of services. 
- * @param bindAddres Where to listen - * @throws IOException + * @param bindAddress Where to listen + * @param conf Configuration to use + * @param scheduler Scheduler to dispatch incoming requests for execution */ public RpcServer(final Server server, final String name, final List services, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java index dea0828f8e6..f527931340b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java @@ -61,8 +61,8 @@ import com.google.common.collect.Sets; /** * The base class for load balancers. It provides the the functions used to by - * {@link org.apache.hadoop.hbase.master.AssignmentManager} to assign regions - * in the edge cases. It doesn't provide an implementation of the + * {@link org.apache.hadoop.hbase.master.AssignmentManager} to assign regions + * in the edge cases. It doesn't provide an implementation of the * actual balancing algorithm. 
* */ @@ -138,8 +138,6 @@ public abstract class BaseLoadBalancer implements LoadBalancer { int numRegions; int numMovedRegions = 0; //num moved regions from the initial configuration - // num of moved regions away from master that should be on the master - int numMovedMetaRegions = 0; //num of moved regions that are META Map> clusterState; protected final RackManager rackManager; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java index cfc6807031f..d032ebadb43 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/FlushTableSubprocedure.java @@ -53,7 +53,7 @@ public class FlushTableSubprocedure extends Subprocedure { this.taskManager = taskManager; } - private class RegionFlushTask implements Callable { + private static class RegionFlushTask implements Callable { HRegion region; RegionFlushTask(HRegion region) { this.region = region; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index fcb8c6fd39e..90b29ef88b1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -1475,7 +1475,7 @@ public class HRegionServer extends HasThread implements } } - class PeriodicMemstoreFlusher extends ScheduledChore { + static class PeriodicMemstoreFlusher extends ScheduledChore { final HRegionServer server; final static int RANGE_OF_DELAY = 20000; //millisec final static int MIN_DELAY_TIME = 3000; //millisec diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java index 56137e8d975..a80c5087464 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java @@ -82,7 +82,7 @@ public class WALCellCodec implements Codec { static String getWALCellCodecClass(Configuration conf) { return conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName()); } - + /** * Create and setup a {@link WALCellCodec} from the {@code cellCodecClsName} and * CompressionContext, if {@code cellCodecClsName} is specified. @@ -106,7 +106,7 @@ public class WALCellCodec implements Codec { } /** - * Create and setup a {@link WALCellCodec} from the + * Create and setup a {@link WALCellCodec} from the * CompressionContext. * Cell Codec classname is read from {@link Configuration}. * Fully prepares the codec for use. @@ -122,7 +122,7 @@ public class WALCellCodec implements Codec { return ReflectionUtils.instantiateWithCustomCtor(cellCodecClsName, new Class[] { Configuration.class, CompressionContext.class }, new Object[] { conf, compression }); } - + public interface ByteStringCompressor { ByteString compress(byte[] data, Dictionary dict) throws IOException; } @@ -249,7 +249,7 @@ public class WALCellCodec implements Codec { protected Cell parseCell() throws IOException { int keylength = StreamUtils.readRawVarint32(in); int vlength = StreamUtils.readRawVarint32(in); - + int tagsLength = StreamUtils.readRawVarint32(in); int length = 0; if(tagsLength == 0) { @@ -328,7 +328,7 @@ public class WALCellCodec implements Codec { } } - public class EnsureKvEncoder extends BaseEncoder { + public static class EnsureKvEncoder extends BaseEncoder { public EnsureKvEncoder(OutputStream out) { super(out); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java index c9196aaad76..5fb3d40a586 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java @@ -85,7 +85,7 @@ public class ZKSecretWatcher extends ZooKeeperListener { if (keysParentZNode.equals(ZKUtil.getParent(path))) { String keyId = ZKUtil.getNodeName(path); try { - Integer id = new Integer(keyId); + Integer id = Integer.valueOf(keyId); secretManager.removeKey(id); } catch (NumberFormatException nfe) { LOG.error("Invalid znode name for key ID '"+keyId+"'", nfe);