From be245101ac4a9456b2e77a93142b4c300fd8a327 Mon Sep 17 00:00:00 2001
From: Jonathan Hsieh
Date: Thu, 5 Apr 2012 23:05:07 +0000
Subject: [PATCH] HBASE-5644 [findbugs] Fix null pointer warnings (Uma Maheswara Rao G)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1310125 13f79535-47bb-0310-9956-ffa450edef68
---
 dev-support/findbugs-exclude.xml              | 18 ++++++++++-
 dev-support/test-patch.properties             |  2 +-
 .../tmpl/regionserver/RSStatusTmpl.jamon      |  8 ++---
 .../apache/hadoop/hbase/client/HTable.java    | 28 +++++++++--------
 .../hadoop/hbase/mapreduce/CellCounter.java   | 30 ++++++++++---------
 .../hadoop/hbase/regionserver/HRegion.java    |  7 +++--
 .../hbase/regionserver/ShutdownHook.java      |  6 ++++
 .../hadoop/hbase/regionserver/Store.java      |  6 ++--
 .../hadoop/hbase/regionserver/StoreFile.java  | 17 ++++++-----
 .../metrics/SchemaConfigured.java             |  8 +++--
 .../org/apache/hadoop/hbase/util/Merge.java   | 17 +++++++----
 11 files changed, 92 insertions(+), 55 deletions(-)

diff --git a/dev-support/findbugs-exclude.xml b/dev-support/findbugs-exclude.xml
index 7912e79b579..f15240f5426 100644
--- a/dev-support/findbugs-exclude.xml
+++ b/dev-support/findbugs-exclude.xml
@@ -30,6 +30,22 @@
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dev-support/test-patch.properties b/dev-support/test-patch.properties
index 2209d270f57..203df69e75a 100644
--- a/dev-support/test-patch.properties
+++ b/dev-support/test-patch.properties
@@ -19,5 +19,5 @@ MAVEN_OPTS="-Xmx3g"
 
 # Please update the per-module test-patch.properties if you update this file.
 OK_RELEASEAUDIT_WARNINGS=84
-OK_FINDBUGS_WARNINGS=601
+OK_FINDBUGS_WARNINGS=585
 OK_JAVADOC_WARNINGS=169
diff --git a/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon b/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
index ae762048cae..79d9c2f60bc 100644
--- a/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
+++ b/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
@@ -44,12 +44,8 @@ org.apache.hadoop.hbase.HBaseConfiguration;
 <%java>
   HServerInfo serverInfo = null;
   ServerName serverName = null;
-  try {
-    serverInfo = regionServer.getHServerInfo();
-    serverName = regionServer.getServerName();
-  } catch (IOException e) {
-    e.printStackTrace();
-  }
+  serverInfo = regionServer.getHServerInfo();
+  serverName = regionServer.getServerName();
   RegionServerMetrics metrics = regionServer.getMetrics();
   List<HRegionInfo> onlineRegions = regionServer.getOnlineRegions();
   int interval = regionServer.getConfiguration().getInt("hbase.regionserver.msginterval", 3000)/1000;
diff --git a/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index 3db02955ef2..aa7652f6a07 100644
--- a/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -746,12 +746,13 @@ public class HTable implements HTableInterface {
 
   @Override
   public void delete(final Delete delete) throws IOException {
-    new ServerCallable<Boolean>(connection, tableName, delete.getRow(), operationTimeout) {
-      public Boolean call() throws IOException {
-        server.delete(location.getRegionInfo().getRegionName(), delete);
-        return null; // FindBugs NP_BOOLEAN_RETURN_NULL
-      }
-    }.withRetries();
+    new ServerCallable<Void>(connection, tableName, delete.getRow(),
+        operationTimeout) {
+      public Void call() throws IOException {
+        server.delete(location.getRegionInfo().getRegionName(), delete);
+        return null;
+      }
+    }.withRetries();
   }
 
   /**
@@ -1038,13 +1039,14 @@ public class HTable implements HTableInterface {
 
   @Override
   public void unlockRow(final RowLock rl) throws IOException {
-    new ServerCallable<Boolean>(connection, tableName, rl.getRow(), operationTimeout) {
-      public Boolean call() throws IOException {
-        server.unlockRow(location.getRegionInfo().getRegionName(),
-            rl.getLockId());
-        return null; // FindBugs NP_BOOLEAN_RETURN_NULL
-      }
-    }.withRetries();
+    new ServerCallable<Void>(connection, tableName, rl.getRow(),
+        operationTimeout) {
+      public Void call() throws IOException {
+        server.unlockRow(location.getRegionInfo().getRegionName(), rl
+            .getLockId());
+        return null;
+      }
+    }.withRetries();
   }
 
   /**
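NOTE (review aside, not part of the patch): the two HTable hunks above fix
FindBugs NP_BOOLEAN_RETURN_NULL, which fires when a method declared to return
Boolean returns null, because a caller that auto-unboxes the result throws
NullPointerException. Re-typing the callable as Void makes null the only
possible result, so nothing can be unboxed. A minimal sketch of the pattern,
using java.util.concurrent.Callable as a stand-in for HBase's ServerCallable:

    import java.util.concurrent.Callable;

    public class VoidCallableSketch {
      // Before: FindBugs NP_BOOLEAN_RETURN_NULL. A caller writing
      // "boolean b = bad.call();" would NPE while auto-unboxing null.
      static final Callable<Boolean> bad = new Callable<Boolean>() {
        public Boolean call() {
          return null;
        }
      };

      // After: with Void, null is the only value call() can produce,
      // so no caller can accidentally unbox it.
      static final Callable<Void> good = new Callable<Void>() {
        public Void call() {
          return null;
        }
      };

      public static void main(String[] args) throws Exception {
        good.call(); // fine: there is nothing to unbox
      }
    }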
diff --git a/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java b/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
index 32d66fb77e2..63b2ed5b079 100644
--- a/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
+++ b/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
@@ -23,27 +23,30 @@
 import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.*;
+import org.apache.hadoop.hbase.filter.CompareFilter;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.filter.RegexStringComparator;
+import org.apache.hadoop.hbase.filter.RowFilter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
-import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.GenericOptionsParser;
+
+import com.google.common.base.Preconditions;
 
 /**
@@ -102,17 +105,16 @@ public class CellCounter {
     public void map(ImmutableBytesWritable row, Result values,
                     Context context)
         throws IOException {
+      Preconditions.checkState(values != null,
+          "values passed to the map is null");
       String currentFamilyName = null;
       String currentQualifierName = null;
       String currentRowKey = null;
       Configuration config = context.getConfiguration();
       String separator = config.get("ReportSeparator",":");
 
-      try {
-        if (values != null) {
-          context.getCounter(Counters.ROWS).increment(1);
-          context.write(new Text("Total ROWS"), new IntWritable(1));
-        }
+      context.getCounter(Counters.ROWS).increment(1);
+      context.write(new Text("Total ROWS"), new IntWritable(1));
 
       for (KeyValue value : values.list()) {
         currentRowKey = Bytes.toStringBinary(value.getRow());
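NOTE (review aside, not part of the patch): CellCounter above trades the
silent "if (values != null)" guard for Guava's Preconditions.checkState,
which fails fast with a descriptive IllegalStateException instead of quietly
skipping the row. A minimal sketch of the idiom; countCells and its message
are illustrative stand-ins, not HBase API:

    import java.util.Arrays;
    import java.util.List;

    import com.google.common.base.Preconditions;

    public class PreconditionsSketch {
      // Stand-in for CellCounter.map(): reject null input up front with
      // a clear message rather than masking the bug with a null check.
      static int countCells(List<byte[]> cells) {
        Preconditions.checkState(cells != null, "cells passed in is null");
        return cells.size(); // safe: checkState has already thrown on null
      }

      public static void main(String[] args) {
        // Arrays.asList(T...) wraps the byte[] as a single list element.
        System.out.println(countCells(Arrays.asList(new byte[] { 1, 2 }))); // 1
        countCells(null); // IllegalStateException: cells passed in is null
      }
    }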
diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 1d11f8fa5d5..f1a68e0c52a 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -634,8 +634,9 @@ public class HRegion implements HeapSize { // , Writable{
     // being split but we crashed in the middle of it all.
     SplitTransaction.cleanupAnySplitDetritus(this);
     FSUtils.deleteDirectory(this.fs, new Path(regiondir, MERGEDIR));
-
-    this.writestate.setReadOnly(this.htableDescriptor.isReadOnly());
+    if (this.htableDescriptor != null) {
+      this.writestate.setReadOnly(this.htableDescriptor.isReadOnly());
+    }
 
     this.writestate.flushRequested = false;
     this.writestate.compacting = 0;
@@ -4987,7 +4988,7 @@ public class HRegion implements HeapSize { // , Writable{
     // detect the actual protocol class
     protocol = protocolHandlerNames.get(protocolName);
     if (protocol == null) {
-      throw new HBaseRPC.UnknownProtocolException(protocol,
+      throw new HBaseRPC.UnknownProtocolException(null,
           "No matching handler for protocol "+protocolName+
           " in region "+Bytes.toStringBinary(getRegionName()));
     }
diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java b/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
index e3b230e05ed..00f8d990c79 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
@@ -168,6 +168,12 @@ public class ShutdownHook {
         break;
       }
     }
+
+    if (cache == null) {
+      throw new RuntimeException(
+          "This should not happen. Could not find the cache class in FileSystem.");
+    }
+
     Field field = null;
     try {
       field = cache.getDeclaredField(CLIENT_FINALIZER_DATA_METHOD);
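NOTE (review aside, not part of the patch): the ShutdownHook hunk above
guards a reflective lookup. The loop scanning FileSystem's declared classes
can finish without a match, leaving cache null, and the later
cache.getDeclaredField(...) was the dereference FindBugs flagged; the patch
turns it into an explicit RuntimeException. A sketch of that shape with
illustrative names (findInnerClass is not HBase API):

    public class ReflectionGuardSketch {
      // Find a named nested class reflectively; the loop may complete
      // without a match, so the result must be checked before use.
      static Class<?> findInnerClass(Class<?> outer, String simpleName) {
        Class<?> found = null;
        for (Class<?> c : outer.getDeclaredClasses()) {
          if (c.getSimpleName().equals(simpleName)) {
            found = c;
            break;
          }
        }
        if (found == null) {
          // Fail fast, as the patch does, instead of letting a later
          // found.getDeclaredField(...) throw a bare NullPointerException.
          throw new RuntimeException("Could not find " + simpleName
              + " in " + outer.getName());
        }
        return found;
      }

      public static void main(String[] args) {
        // java.util.HashMap declares a nested Node class on Java 8+;
        // purely illustrative input.
        System.out.println(findInnerClass(java.util.HashMap.class, "Node"));
      }
    }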
diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java b/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
index 0c7b3969c8e..509a4677812 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
@@ -494,10 +494,10 @@ public class Store extends SchemaConfigured implements HeapSize {
     reader.loadFileInfo();
 
     byte[] firstKey = reader.getFirstRowKey();
+    Preconditions.checkState(firstKey != null, "First key can not be null");
     byte[] lk = reader.getLastKey();
-    byte[] lastKey =
-      (lk == null) ? null :
-          KeyValue.createKeyValueFromKey(lk).getRow();
+    Preconditions.checkState(lk != null, "Last key can not be null");
+    byte[] lastKey = KeyValue.createKeyValueFromKey(lk).getRow();
 
     LOG.debug("HFile bounds: first=" + Bytes.toStringBinary(firstKey) +
         " last=" + Bytes.toStringBinary(lastKey));
diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java b/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
index 2e98b393f47..1636bfb5a0b 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
@@ -1758,12 +1758,15 @@ public class StoreFile extends SchemaConfigured {
     /**
      * FILE_SIZE = descending sort StoreFiles (largest --> smallest in size)
      */
-    static final Comparator<StoreFile> FILE_SIZE =
-      Ordering.natural().reverse().onResultOf(new Function<StoreFile, Long>() {
-        @Override
-        public Long apply(StoreFile sf) {
-          return sf.getReader().length();
-        }
-      });
+    static final Comparator<StoreFile> FILE_SIZE = Ordering.natural().reverse()
+        .onResultOf(new Function<StoreFile, Long>() {
+          @Override
+          public Long apply(StoreFile sf) {
+            if (sf == null) {
+              throw new IllegalArgumentException("StoreFile can not be null");
+            }
+            return sf.getReader().length();
+          }
+        });
   }
 }
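NOTE (review aside, not part of the patch): the StoreFile hunk above rebuilds
the FILE_SIZE comparator. Ordering.natural().reverse().onResultOf(fn) yields
a largest-first comparator keyed on whatever fn extracts, and since Guava
declares the parameter of Function.apply as nullable, the explicit null check
inside apply() is what satisfies FindBugs. A self-contained sketch with
strings standing in for store files:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.Comparator;
    import java.util.List;

    import com.google.common.base.Function;
    import com.google.common.collect.Ordering;

    public class OrderingSketch {
      // Largest-first ordering by string length, mirroring FILE_SIZE.
      static final Comparator<String> BY_LENGTH_DESC = Ordering.natural()
          .reverse().onResultOf(new Function<String, Integer>() {
            @Override
            public Integer apply(String s) {
              // apply() may be handed null, so guard explicitly,
              // as the patch does for StoreFile.
              if (s == null) {
                throw new IllegalArgumentException("input can not be null");
              }
              return s.length();
            }
          });

      public static void main(String[] args) {
        List<String> words = Arrays.asList("bb", "a", "cccc");
        Collections.sort(words, BY_LENGTH_DESC);
        System.out.println(words); // [cccc, bb, a]
      }
    }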
diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/SchemaConfigured.java b/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/SchemaConfigured.java
index 945d8024c93..259e32a0c95 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/SchemaConfigured.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/metrics/SchemaConfigured.java
@@ -157,8 +157,12 @@ public class SchemaConfigured implements HeapSize, SchemaAware {
   public SchemaConfigured(Configuration conf, String tableName,
       String cfName) {
     this(conf);
-    this.tableName = tableName != null ? tableName.intern() : tableName;
-    this.cfName = cfName != null ? cfName.intern() : cfName;
+    if (tableName != null) {
+      this.tableName = tableName.intern();
+    }
+    if (cfName != null) {
+      this.cfName = cfName.intern();
+    }
   }
 
   public SchemaConfigured(SchemaAware that) {
diff --git a/src/main/java/org/apache/hadoop/hbase/util/Merge.java b/src/main/java/org/apache/hadoop/hbase/util/Merge.java
index 04f15d4ae42..0800099e484 100644
--- a/src/main/java/org/apache/hadoop/hbase/util/Merge.java
+++ b/src/main/java/org/apache/hadoop/hbase/util/Merge.java
@@ -45,6 +45,8 @@
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
+import com.google.common.base.Preconditions;
+
 import java.io.IOException;
 import java.util.List;
@@ -152,12 +154,14 @@ public class Merge extends Configured implements Tool {
     Get get = new Get(region1);
     get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
     List<KeyValue> cells1 = rootRegion.get(get, null).list();
-    HRegionInfo info1 = Writables.getHRegionInfo((cells1 == null)? null: cells1.get(0).getValue());
+    Preconditions.checkState(cells1 != null, "First region cells can not be null");
+    HRegionInfo info1 = Writables.getHRegionInfo(cells1.get(0).getValue());
 
     get = new Get(region2);
     get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
     List<KeyValue> cells2 = rootRegion.get(get, null).list();
-    HRegionInfo info2 = Writables.getHRegionInfo((cells2 == null)? null: cells2.get(0).getValue());
+    Preconditions.checkState(cells2 != null, "Second region cells can not be null");
+    HRegionInfo info2 = Writables.getHRegionInfo(cells2.get(0).getValue());
 
     HRegion merged = merge(HTableDescriptor.META_TABLEDESC, info1, rootRegion, info2, rootRegion);
     LOG.info("Adding " + merged.getRegionInfo() + " to " + rootRegion.getRegionInfo());
@@ -221,8 +225,9 @@ public class Merge extends Configured implements Tool {
     Get get = new Get(region1);
     get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
     List<KeyValue> cells1 = metaRegion1.get(get, null).list();
-    HRegionInfo info1 =
-      Writables.getHRegionInfo((cells1 == null)? null: cells1.get(0).getValue());
+    Preconditions.checkState(cells1 != null,
+        "First region cells can not be null");
+    HRegionInfo info1 = Writables.getHRegionInfo(cells1.get(0).getValue());
     if (info1 == null) {
       throw new NullPointerException("info1 is null using key " +
           Bytes.toStringBinary(region1) + " in " + meta1);
@@ -237,7 +242,9 @@ public class Merge extends Configured implements Tool {
     get = new Get(region2);
     get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
     List<KeyValue> cells2 = metaRegion2.get(get, null).list();
-    HRegionInfo info2 = Writables.getHRegionInfo((cells2 == null)? null: cells2.get(0).getValue());
+    Preconditions.checkState(cells2 != null,
+        "Second region cells can not be null");
+    HRegionInfo info2 = Writables.getHRegionInfo(cells2.get(0).getValue());
     if (info2 == null) {
       throw new NullPointerException("info2 is null using key " + meta2);
     }