HBASE-5644 [findbugs] Fix null pointer warnings (Uma Maheswara Rao G)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1310125 13f79535-47bb-0310-9956-ffa450edef68
Jonathan Hsieh 2012-04-05 23:05:07 +00:00
parent fa97600ea9
commit be245101ac
11 changed files with 92 additions and 55 deletions


@@ -30,6 +30,22 @@
   <Match>
     <Package name="org.apache.hadoop.hbase.protobuf.generated"/>
   </Match>
+  <Match>
+    <Class name="org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost" />
+    <Or>
+      <Method name="preExists" />
+      <Method name="preCheckAndPut" />
+      <Method name="preCheckAndDelete" />
+      <Method name="preScannerNext" />
+    </Or>
+    <Bug pattern="NP_BOOLEAN_RETURN_NULL" />
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.hbase.regionserver.StoreFile$Writer" />
+    <Bug pattern="NP_NULL_PARAM_DEREF" />
+  </Match>
 </FindBugsFilter>
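Note on the first exclusion: NP_BOOLEAN_RETURN_NULL fires whenever a method with a boxed Boolean return type can return null. The coprocessor hooks listed above do that deliberately, treating the Boolean as a tri-state, so the warning is suppressed rather than "fixed". A minimal illustrative sketch of the code shape (not HBase source):

// Illustrative sketch of the pattern behind NP_BOOLEAN_RETURN_NULL.
// TRUE/FALSE short-circuit the operation; null means "no override,
// fall through to the default path".
public class TriStateHookExample {
  public Boolean preExists(byte[] row) {
    if (row == null) {
      return Boolean.FALSE; // a definite answer from the hook
    }
    return null;            // no opinion; caller runs the default path
  }

  public static void main(String[] args) {
    Boolean decision = new TriStateHookExample().preExists(null);
    System.out.println(decision == null ? "fall through" : decision.toString());
  }
}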


@@ -19,5 +19,5 @@ MAVEN_OPTS="-Xmx3g"
 # Please update the per-module test-patch.properties if you update this file.
 OK_RELEASEAUDIT_WARNINGS=84
-OK_FINDBUGS_WARNINGS=601
+OK_FINDBUGS_WARNINGS=585
 OK_JAVADOC_WARNINGS=169


@@ -44,12 +44,8 @@ org.apache.hadoop.hbase.HBaseConfiguration;
 <%java>
   HServerInfo serverInfo = null;
   ServerName serverName = null;
-  try {
-    serverInfo = regionServer.getHServerInfo();
-    serverName = regionServer.getServerName();
-  } catch (IOException e) {
-    e.printStackTrace();
-  }
+  serverInfo = regionServer.getHServerInfo();
+  serverName = regionServer.getServerName();
   RegionServerMetrics metrics = regionServer.getMetrics();
   List<HRegionInfo> onlineRegions = regionServer.getOnlineRegions();
   int interval = regionServer.getConfiguration().getInt("hbase.regionserver.msginterval", 3000)/1000;


@@ -746,12 +746,13 @@ public class HTable implements HTableInterface {
   @Override
   public void delete(final Delete delete)
   throws IOException {
-    new ServerCallable<Boolean>(connection, tableName, delete.getRow(), operationTimeout) {
-          public Boolean call() throws IOException {
-            server.delete(location.getRegionInfo().getRegionName(), delete);
-            return null; // FindBugs NP_BOOLEAN_RETURN_NULL
-          }
-        }.withRetries();
+    new ServerCallable<Void>(connection, tableName, delete.getRow(),
+        operationTimeout) {
+          public Void call() throws IOException {
+            server.delete(location.getRegionInfo().getRegionName(), delete);
+            return null;
+          }
+        }.withRetries();
   }

   /**
@@ -1038,13 +1039,14 @@
   @Override
   public void unlockRow(final RowLock rl)
   throws IOException {
-    new ServerCallable<Boolean>(connection, tableName, rl.getRow(), operationTimeout) {
-          public Boolean call() throws IOException {
-            server.unlockRow(location.getRegionInfo().getRegionName(),
-                rl.getLockId());
-            return null; // FindBugs NP_BOOLEAN_RETURN_NULL
-          }
-        }.withRetries();
+    new ServerCallable<Void>(connection, tableName, rl.getRow(),
+        operationTimeout) {
+          public Void call() throws IOException {
+            server.unlockRow(location.getRegionInfo().getRegionName(), rl
+                .getLockId());
+            return null;
+          }
+        }.withRetries();
   }

   /**
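Note on the fix: swapping Boolean for java.lang.Void makes "return null" the only legal return value, since Void has no instances, so FindBugs no longer flags it. A standalone sketch of the idiom using a plain java.util.concurrent.Callable as a stand-in for HBase's ServerCallable:

import java.util.concurrent.Callable;

// Minimal sketch of the Void-typed callable idiom. The operation is run
// purely for its side effect; Void documents that no result is produced.
public class VoidCallableExample {
  public static void main(String[] args) throws Exception {
    Callable<Void> deleteOp = new Callable<Void>() {
      @Override
      public Void call() {
        System.out.println("side-effecting delete would run here");
        return null; // the only possible Void value
      }
    };
    deleteOp.call();
  }
}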


@@ -23,27 +23,30 @@ import java.io.IOException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.*;
+import org.apache.hadoop.hbase.filter.CompareFilter;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.filter.RegexStringComparator;
+import org.apache.hadoop.hbase.filter.RowFilter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
-import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.GenericOptionsParser;
+
+import com.google.common.base.Preconditions;

 /**
@@ -102,17 +105,16 @@ public class CellCounter {
     public void map(ImmutableBytesWritable row, Result values,
                     Context context)
         throws IOException {
+      Preconditions.checkState(values != null,
+          "values passed to the map is null");
       String currentFamilyName = null;
       String currentQualifierName = null;
       String currentRowKey = null;
       Configuration config = context.getConfiguration();
       String separator = config.get("ReportSeparator",":");
       try {
-        if (values != null) {
-          context.getCounter(Counters.ROWS).increment(1);
-          context.write(new Text("Total ROWS"), new IntWritable(1));
-        }
+        context.getCounter(Counters.ROWS).increment(1);
+        context.write(new Text("Total ROWS"), new IntWritable(1));
         for (KeyValue value : values.list()) {
           currentRowKey = Bytes.toStringBinary(value.getRow());
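Note on the map() change: a silent `if (values != null)` guard becomes a Guava precondition that fails loudly with a clear message instead of letting `values.list()` throw a bare NPE later. A self-contained sketch of the idiom, assuming Guava on the classpath (the countCells helper is hypothetical, not CellCounter code):

import java.util.Arrays;
import java.util.List;

import com.google.common.base.Preconditions;

// checkState throws IllegalStateException with the given message when
// the condition is false; past the check the reference is known non-null.
public class PreconditionsExample {
  static int countCells(List<String> values) {
    Preconditions.checkState(values != null, "values passed to the map is null");
    return values.size(); // safe: guaranteed non-null past the check
  }

  public static void main(String[] args) {
    System.out.println(countCells(Arrays.asList("a", "b"))); // prints 2
  }
}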


@@ -634,8 +634,9 @@ public class HRegion implements HeapSize { // , Writable{
     // being split but we crashed in the middle of it all.
     SplitTransaction.cleanupAnySplitDetritus(this);
     FSUtils.deleteDirectory(this.fs, new Path(regiondir, MERGEDIR));
-    this.writestate.setReadOnly(this.htableDescriptor.isReadOnly());
+    if (this.htableDescriptor != null) {
+      this.writestate.setReadOnly(this.htableDescriptor.isReadOnly());
+    }
     this.writestate.flushRequested = false;
     this.writestate.compacting = 0;
@@ -4987,7 +4988,7 @@ public class HRegion implements HeapSize { // , Writable{
       // detect the actual protocol class
       protocol = protocolHandlerNames.get(protocolName);
       if (protocol == null) {
-        throw new HBaseRPC.UnknownProtocolException(protocol,
+        throw new HBaseRPC.UnknownProtocolException(null,
            "No matching handler for protocol "+protocolName+
            " in region "+Bytes.toStringBinary(getRegionName()));
       }


@@ -168,6 +168,12 @@ public class ShutdownHook {
         break;
       }
     }
+    if (cache == null) {
+      throw new RuntimeException(
+          "This should not happen. Could not find the cache class in FileSystem.");
+    }
     Field field = null;
     try {
       field = cache.getDeclaredField(CLIENT_FINALIZER_DATA_METHOD);
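Note on the added guard: the reflective lookup now fails fast with a descriptive RuntimeException instead of an NPE on cache.getDeclaredField(...). A runnable sketch of the same pattern against a JDK class (HashMap.Node and its "key" field are arbitrary stand-ins for FileSystem's internal cache class):

import java.lang.reflect.Field;

// Resolve a class by scanning declared inner classes, and refuse to
// continue if the scan found nothing, so the reflective call cannot NPE.
public class FailFastReflectionExample {
  public static void main(String[] args) throws Exception {
    Class<?> cache = null;
    for (Class<?> inner : java.util.HashMap.class.getDeclaredClasses()) {
      if ("Node".equals(inner.getSimpleName())) {
        cache = inner;
        break;
      }
    }
    if (cache == null) {
      throw new RuntimeException("Could not find the expected inner class.");
    }
    Field field = cache.getDeclaredField("key"); // would NPE without the guard
    System.out.println("resolved " + field);
  }
}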


@@ -494,10 +494,10 @@ public class Store extends SchemaConfigured implements HeapSize {
     reader.loadFileInfo();
     byte[] firstKey = reader.getFirstRowKey();
+    Preconditions.checkState(firstKey != null, "First key can not be null");
     byte[] lk = reader.getLastKey();
-    byte[] lastKey =
-        (lk == null) ? null :
-            KeyValue.createKeyValueFromKey(lk).getRow();
+    Preconditions.checkState(lk != null, "Last key can not be null");
+    byte[] lastKey = KeyValue.createKeyValueFromKey(lk).getRow();
     LOG.debug("HFile bounds: first=" + Bytes.toStringBinary(firstKey) +
         " last=" + Bytes.toStringBinary(lastKey));


@@ -1758,12 +1758,15 @@ public class StoreFile extends SchemaConfigured {
   /**
    * FILE_SIZE = descending sort StoreFiles (largest --> smallest in size)
    */
-  static final Comparator<StoreFile> FILE_SIZE =
-    Ordering.natural().reverse().onResultOf(new Function<StoreFile, Long>() {
-      @Override
-      public Long apply(StoreFile sf) {
-        return sf.getReader().length();
-      }
-    });
+  static final Comparator<StoreFile> FILE_SIZE = Ordering.natural().reverse()
+      .onResultOf(new Function<StoreFile, Long>() {
+        @Override
+        public Long apply(StoreFile sf) {
+          if (sf == null) {
+            throw new IllegalArgumentException("StorFile can not be null");
+          }
+          return sf.getReader().length();
+        }
+      });
   }
 }
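Note on the comparator: it is built on Guava's Ordering, natural order over the extracted length, reversed for largest-first, now with an explicit null check inside apply(). A self-contained sketch of the idiom, assuming Guava, with String length standing in for StoreFile size:

import java.util.Arrays;
import java.util.List;

import com.google.common.base.Function;
import com.google.common.collect.Ordering;

// Natural ordering on a derived key, reversed for a descending sort,
// with the same defensive null check the commit adds to apply().
public class DescendingBySizeExample {
  public static void main(String[] args) {
    Ordering<String> bySizeDesc = Ordering.natural().reverse()
        .onResultOf(new Function<String, Integer>() {
          @Override
          public Integer apply(String s) {
            if (s == null) {
              throw new IllegalArgumentException("element can not be null");
            }
            return s.length(); // the sort key
          }
        });
    List<String> sorted = bySizeDesc.sortedCopy(Arrays.asList("aa", "aaaa", "a"));
    System.out.println(sorted); // [aaaa, aa, a]
  }
}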


@@ -157,8 +157,12 @@ public class SchemaConfigured implements HeapSize, SchemaAware {
   public SchemaConfigured(Configuration conf, String tableName, String cfName)
   {
     this(conf);
-    this.tableName = tableName != null ? tableName.intern() : tableName;
-    this.cfName = cfName != null ? cfName.intern() : cfName;
+    if (tableName != null) {
+      this.tableName = tableName.intern();
+    }
+    if (cfName != null) {
+      this.cfName = cfName.intern();
+    }
   }

   public SchemaConfigured(SchemaAware that) {
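Note on why the constructor interns at all: the same table and column-family names recur across many SchemaConfigured instances, and intern() collapses equal strings to one canonical object; the rewrite merely replaces the ternaries with the explicit null guards FindBugs asked for. A small sketch of the interning behavior:

// String.intern() returns the canonical pooled instance for equal strings,
// so reference equality (==) holds after interning; null must be guarded.
public class InternExample {
  public static void main(String[] args) {
    String a = new String("usertable"); // a fresh heap instance
    String b = "usertable";             // the pooled literal
    System.out.println(a == b);          // false: two distinct objects
    System.out.println(a.intern() == b); // true: one canonical instance
    String tableName = null;
    String interned = (tableName != null) ? tableName.intern() : null;
    System.out.println(interned);        // null, handled without an NPE
  }
}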


@@ -45,6 +45,8 @@ import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;

+import com.google.common.base.Preconditions;
+
 import java.io.IOException;
 import java.util.List;

@@ -152,12 +154,14 @@ public class Merge extends Configured implements Tool {
     Get get = new Get(region1);
     get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
     List<KeyValue> cells1 = rootRegion.get(get, null).list();
-    HRegionInfo info1 = Writables.getHRegionInfo((cells1 == null)? null: cells1.get(0).getValue());
+    Preconditions.checkState(cells1 != null, "First region cells can not be null");
+    HRegionInfo info1 = Writables.getHRegionInfo(cells1.get(0).getValue());

     get = new Get(region2);
     get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
     List<KeyValue> cells2 = rootRegion.get(get, null).list();
-    HRegionInfo info2 = Writables.getHRegionInfo((cells2 == null)? null: cells2.get(0).getValue());
+    Preconditions.checkState(cells2 != null, "Second region cells can not be null");
+    HRegionInfo info2 = Writables.getHRegionInfo(cells2.get(0).getValue());
     HRegion merged = merge(HTableDescriptor.META_TABLEDESC, info1, rootRegion, info2, rootRegion);
     LOG.info("Adding " + merged.getRegionInfo() + " to " +
         rootRegion.getRegionInfo());
@@ -221,8 +225,9 @@ public class Merge extends Configured implements Tool {
     Get get = new Get(region1);
     get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
     List<KeyValue> cells1 = metaRegion1.get(get, null).list();
-    HRegionInfo info1 =
-      Writables.getHRegionInfo((cells1 == null)? null: cells1.get(0).getValue());
+    Preconditions.checkState(cells1 != null,
+        "First region cells can not be null");
+    HRegionInfo info1 = Writables.getHRegionInfo(cells1.get(0).getValue());
     if (info1 == null) {
       throw new NullPointerException("info1 is null using key " +
           Bytes.toStringBinary(region1) + " in " + meta1);
@@ -237,7 +242,9 @@ public class Merge extends Configured implements Tool {
     get = new Get(region2);
     get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
     List<KeyValue> cells2 = metaRegion2.get(get, null).list();
-    HRegionInfo info2 = Writables.getHRegionInfo((cells2 == null)? null: cells2.get(0).getValue());
+    Preconditions.checkState(cells2 != null,
+        "Second region cells can not be null");
+    HRegionInfo info2 = Writables.getHRegionInfo(cells2.get(0).getValue());
     if (info2 == null) {
       throw new NullPointerException("info2 is null using key " + meta2);
     }