diff --git a/CHANGES.txt b/CHANGES.txt
index c36ccd497f1..ebe750ee122 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -12,8 +12,8 @@ Release 0.20.0 - Unreleased
              (Samuel Guo via Stack)
   HBASE-1130 PrefixRowFilter (Michael Gottesman via Stack)
   HBASE-1139 Update Clover in build.xml
-  HBASE-876  There are a large number of Java warnings in HBase; part 1
-             (Evgeny Ryabitskiy via Stack)
+  HBASE-876  There are a large number of Java warnings in HBase; part 1,
+             part 2, and part 3 (Evgeny Ryabitskiy via Stack)
 
 Release 0.19.0 - Unreleased
   INCOMPATIBLE CHANGES
diff --git a/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 77ac9def1ad..89266066504 100644
--- a/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.RegionException;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
-import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.RowResult;
@@ -45,7 +44,6 @@ import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.util.Shell.ExitCodeException;
 
 /**
  * Provides administrative functions for HBase
@@ -133,9 +131,10 @@ public class HBaseAdmin {
   }
 
   private long getPauseTime(int tries) {
-    if (tries >= HConstants.RETRY_BACKOFF.length)
-      tries = HConstants.RETRY_BACKOFF.length - 1;
-    return this.pause * HConstants.RETRY_BACKOFF[tries];
+    int triesCount = tries;
+    if (triesCount >= HConstants.RETRY_BACKOFF.length)
+      triesCount = HConstants.RETRY_BACKOFF.length - 1;
+    return this.pause * HConstants.RETRY_BACKOFF[triesCount];
   }
 
   /**
@@ -534,8 +533,10 @@ public class HBaseAdmin {
     int xtraArgsCount = 1;
     Object [] newargs = new Object[len + xtraArgsCount];
     newargs[0] = regionname;
-    for (int i = 0; i < len; i++) {
-      newargs[i + xtraArgsCount] = args[i];
+    if(args != null) {
+      for (int i = 0; i < len; i++) {
+        newargs[i + xtraArgsCount] = args[i];
+      }
     }
     modifyTable(HConstants.META_TABLE_NAME, HConstants.MODIFY_CLOSE_REGION,
       newargs);
diff --git a/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java b/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
index 4c0ef5ef16f..1413de4f039 100644
--- a/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
+++ b/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
@@ -875,20 +875,21 @@ public class HConnectionManager implements HConstants {
     getRegionLocationForRowWithRetries(byte[] tableName, byte[] rowKey,
       boolean reload)
     throws IOException {
+      boolean reloadFlag = reload;
       getMaster();
       List<Throwable> exceptions = new ArrayList<Throwable>();
       HRegionLocation location = null;
       int tries = 0;
       while (tries < numRetries) {
         try {
-          location = getRegionLocation(tableName, rowKey, reload);
+          location = getRegionLocation(tableName, rowKey, reloadFlag);
         } catch (Throwable t) {
           exceptions.add(t);
         }
         if (location != null) {
           break;
         }
-        reload = true;
+        reloadFlag = true;
         tries++;
         try {
           Thread.sleep(getPauseTime(tries));
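Both getPauseTime() in HBaseAdmin and getRegionLocationForRowWithRetries() in HConnectionManager above apply the same cleanup: copy a parameter that gets reassigned into a local variable and mutate the local instead, which silences the "assignment to method parameter" warning without changing behavior. A minimal sketch of the pattern, with a made-up backoff table standing in for HConstants.RETRY_BACKOFF:

public class BackoffSketch {
  // Hypothetical multipliers for illustration; HConstants.RETRY_BACKOFF holds the real ones.
  private static final int[] RETRY_BACKOFF = { 1, 2, 4, 8, 16 };
  private final long pause = 1000;

  long getPauseTime(int tries) {
    int triesCount = tries;                    // local copy; the parameter stays effectively final
    if (triesCount >= RETRY_BACKOFF.length) {
      triesCount = RETRY_BACKOFF.length - 1;   // clamp to the longest backoff
    }
    return pause * RETRY_BACKOFF[triesCount];
  }
}

The same idea drives reloadFlag in the retry loop: the loop flips the local copy to true after the first attempt, while the reload parameter itself is never written to.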
diff --git a/src/java/org/apache/hadoop/hbase/client/MetaScanner.java b/src/java/org/apache/hadoop/hbase/client/MetaScanner.java
index bfc540e2918..6b5be17bf22 100644
--- a/src/java/org/apache/hadoop/hbase/client/MetaScanner.java
+++ b/src/java/org/apache/hadoop/hbase/client/MetaScanner.java
@@ -7,7 +7,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.hbase.util.Bytes; //TODO: remove
 
 /**
  * Scanner class that contains the .META. table scanning logic
diff --git a/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java b/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java
index 6aed6ec8c26..7b319353c50 100644
--- a/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java
+++ b/src/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java
@@ -25,8 +25,10 @@ import org.apache.hadoop.hbase.DoNotRetryIOException;
 /**
  * Thrown when a scanner has timed out.
  */
-@SuppressWarnings("serial")
 public class ScannerTimeoutException extends DoNotRetryIOException {
+
+  private static final long serialVersionUID = 8788838690290688313L;
+
   /** default constructor */
   ScannerTimeoutException() {
     super();
diff --git a/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java b/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java
index f96fcb16da4..ccdce6b6654 100644
--- a/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java
+++ b/src/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java
@@ -15,55 +15,46 @@ public class UnmodifyableHColumnDescriptor extends HColumnDescriptor {
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(byte[] key, byte[] value) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(String key, String value) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMaxVersions(int maxVersions) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setInMemory(boolean inMemory) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setBlockCacheEnabled(boolean blockCacheEnabled) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMaxValueLength(int maxLength) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setTimeToLive(int timeToLive) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setCompressionType(CompressionType type) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMapFileIndexInterval(int interval) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
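The ScannerTimeoutException change above is the template for every @SuppressWarnings("serial") removal in this patch: instead of hiding the warning, declare an explicit serialVersionUID, which also pins the serialized form across recompiles. A hedged sketch with an invented exception class (the real ones extend DoNotRetryIOException or Exception):

import java.io.IOException;

public class ExampleTimeoutException extends IOException {
  // Any stable value works; the JDK serialver tool can derive one from the current class shape.
  private static final long serialVersionUID = 1L;

  public ExampleTimeoutException() {
    super();
  }

  public ExampleTimeoutException(String s) {
    super(s);
  }
}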
diff --git a/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java b/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
index bad53242a67..3a4a0a71022 100644
--- a/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
+++ b/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
@@ -75,37 +75,31 @@ public class UnmodifyableHTableDescriptor extends HTableDescriptor {
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setInMemory(boolean inMemory) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setReadOnly(boolean readOnly) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(byte[] key, byte[] value) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(String key, String value) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMaxFileSize(long maxFileSize) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMemcacheFlushSize(int memcacheFlushSize) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
diff --git a/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java b/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java
index 0e469d7c1f1..41592e449bd 100644
--- a/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java
+++ b/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java
@@ -137,7 +137,6 @@ public class IndexSpecification implements Writable {
   }
 
   /** {@inheritDoc} */
-  @SuppressWarnings("unchecked")
   public void readFields(DataInput in) throws IOException {
     indexId = in.readUTF();
     int numIndexedCols = in.readInt();
diff --git a/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTable.java b/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTable.java
index d384c4aef0d..f3d030facbe 100644
--- a/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTable.java
+++ b/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTable.java
@@ -48,7 +48,7 @@ public class IndexedTable extends TransactionalTable {
   public static final byte[] INDEX_BASE_ROW_COLUMN = Bytes.add(
       INDEX_COL_FAMILY, Bytes.toBytes("ROW"));
 
-  private static final Log LOG = LogFactory.getLog(IndexedTable.class);
+  static final Log LOG = LogFactory.getLog(IndexedTable.class);
 
   private Map<String, HTable> indexIdToTable = new HashMap<String, HTable>();
diff --git a/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java b/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java
index 9c753c88eb3..4c02ce1d149 100644
--- a/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java
+++ b/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexedTableAdmin.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MasterNotRunningException;
-import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.util.Bytes;
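The two Unmodifyable* wrappers above are the textbook read-only decorator: inherit all getters, override every setter to throw UnsupportedOperationException. The deleted @SuppressWarnings("unused") annotations were never needed there, since parameters of an overriding method belong to the inherited signature and are not reported as unused. A stripped-down sketch of the pattern; Descriptor and its subclass are invented for illustration:

class Descriptor {
  private int maxVersions = 3;
  public void setMaxVersions(int maxVersions) { this.maxVersions = maxVersions; }
  public int getMaxVersions() { return maxVersions; }
}

class UnmodifiableDescriptor extends Descriptor {
  @Override
  public void setMaxVersions(int maxVersions) {
    // Reads pass through unchanged; every mutator throws.
    throw new UnsupportedOperationException("Descriptor is read-only");
  }
}

Note that the untouched context line in setMapFileIndexInterval still says "HTableDescriptor is read-only" inside the column-descriptor wrapper; that pre-existing copy-paste slip is outside the scope of this warnings patch.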
diff --git a/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java b/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java
index 0b6149e49a9..76573630071 100644
--- a/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java
+++ b/src/java/org/apache/hadoop/hbase/client/transactional/CommitUnsuccessfulException.java
@@ -22,9 +22,10 @@ package org.apache.hadoop.hbase.client.transactional;
 
 /** Thrown when a transaction cannot be committed.
  *
  */
-@SuppressWarnings("serial")
 public class CommitUnsuccessfulException extends Exception {
+  private static final long serialVersionUID = 7062921444531109202L;
+
   /** Default Constructor */
   public CommitUnsuccessfulException() {
     super();
diff --git a/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java b/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java
index 33ebc665a4e..66f2bc50424 100644
--- a/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java
+++ b/src/java/org/apache/hadoop/hbase/client/transactional/UnknownTransactionException.java
@@ -24,8 +24,9 @@ import org.apache.hadoop.hbase.DoNotRetryIOException;
 /**
  * Thrown if a region server is passed an unknown transaction id
  */
-@SuppressWarnings("serial")
- public class UnknownTransactionException extends DoNotRetryIOException {
+public class UnknownTransactionException extends DoNotRetryIOException {
+
+  private static final long serialVersionUID = 698575374929591099L;
 
   /** constructor */
   public UnknownTransactionException() {
diff --git a/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java b/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
index 07b1bc74401..4fd54d6ac6c 100644
--- a/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
+++ b/src/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
@@ -122,11 +122,11 @@ public class ColumnValueFilter implements RowFilterInterface {
     this.filterIfColumnMissing = filterIfColumnMissing;
   }
 
-  public boolean filterRowKey(@SuppressWarnings("unused") final byte[] rowKey) {
+  public boolean filterRowKey(final byte[] rowKey) {
     return false;
   }
 
-  public boolean filterColumn(@SuppressWarnings("unused") final byte[] rowKey,
+  public boolean filterColumn(final byte[] rowKey,
       final byte[] colKey, final byte[] data) {
     if (!filterIfColumnMissing) {
       return false; // Must filter on the whole row
@@ -201,12 +201,12 @@ public class ColumnValueFilter implements RowFilterInterface {
     // Nothing.
   }
 
-  public void rowProcessed(@SuppressWarnings("unused") final boolean filtered,
-      @SuppressWarnings("unused") final byte[] key) {
+  public void rowProcessed(final boolean filtered,
+      final byte[] key) {
     // Nothing
   }
 
-  public void validate(@SuppressWarnings("unused") final byte[][] columns) {
+  public void validate(final byte[][] columns) {
     // Nothing
   }
diff --git a/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java b/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java
index 7e6ad513b6e..f67e5e0050e 100644
--- a/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java
+++ b/src/java/org/apache/hadoop/hbase/filter/PageRowFilter.java
@@ -61,7 +61,7 @@ public class PageRowFilter implements RowFilterInterface {
     this.pageSize = pageSize;
   }
 
-  public void validate(@SuppressWarnings("unused") final byte [][] columns) {
+  public void validate(final byte [][] columns) {
     // Doesn't filter columns
   }
 
@@ -70,7 +70,7 @@ public class PageRowFilter implements RowFilterInterface {
   }
 
   public void rowProcessed(boolean filtered,
-    @SuppressWarnings("unused") byte [] rowKey) {
+    byte [] rowKey) {
     if (!filtered) {
       this.rowsAccepted++;
     }
@@ -84,18 +84,17 @@ public class PageRowFilter implements RowFilterInterface {
     return this.rowsAccepted > this.pageSize;
   }
 
-  public boolean filterRowKey(@SuppressWarnings("unused") final byte [] r) {
+  public boolean filterRowKey(final byte [] r) {
     return filterAllRemaining();
   }
 
-  public boolean filterColumn(@SuppressWarnings("unused") final byte [] rowKey,
-    @SuppressWarnings("unused") final byte [] colKey,
-    @SuppressWarnings("unused") final byte[] data) {
+  public boolean filterColumn(final byte [] rowKey,
+    final byte [] colKey,
+    final byte[] data) {
     return filterAllRemaining();
   }
 
-  public boolean filterRow(@SuppressWarnings("unused")
-    final SortedMap<byte [], Cell> columns) {
+  public boolean filterRow(final SortedMap<byte [], Cell> columns) {
     return filterAllRemaining();
   }
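ColumnValueFilter and PageRowFilter above repeat another recurring fix: dropping @SuppressWarnings("unused") from parameters that an implementation ignores but that RowFilterInterface mandates. A parameter required by an implemented interface is not "unused" in a way worth suppressing, and a short comment in the empty body documents the deliberate no-op better than an annotation. A minimal sketch, with an invented interface mirroring the RowFilterInterface shape:

interface ExampleRowFilter {
  boolean filterRowKey(byte[] rowKey);
  void rowProcessed(boolean filtered, byte[] rowKey);
}

class AcceptAllFilter implements ExampleRowFilter {
  public boolean filterRowKey(final byte[] rowKey) {
    return false; // accepts every row, but the signature is fixed by the interface
  }

  public void rowProcessed(boolean filtered, byte[] rowKey) {
    // Doesn't care: the callback is required, a reaction to it is not.
  }
}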
diff --git a/src/java/org/apache/hadoop/hbase/filter/PrefixRowFilter.java b/src/java/org/apache/hadoop/hbase/filter/PrefixRowFilter.java
index f80e76d7b14..004e1933cf0 100644
--- a/src/java/org/apache/hadoop/hbase/filter/PrefixRowFilter.java
+++ b/src/java/org/apache/hadoop/hbase/filter/PrefixRowFilter.java
@@ -44,15 +44,12 @@ public class PrefixRowFilter implements RowFilterInterface {
    * Default Constructor, filters nothing. Required for RPC
    * deserialization
    */
-  @SuppressWarnings("unused")
   public PrefixRowFilter() { }
 
-  @SuppressWarnings("unused")
   public void reset() {
     // Nothing to reset
   }
-
-  @SuppressWarnings("unused")
+
   public void rowProcessed(boolean filtered, byte [] key) {
     // does not care
   }
@@ -76,18 +73,15 @@ public class PrefixRowFilter implements RowFilterInterface {
     return false;
   }
 
-  @SuppressWarnings("unused")
   public boolean filterColumn(final byte [] rowKey, final byte [] colunmName,
       final byte[] columnValue) {
     return false;
   }
 
-  @SuppressWarnings("unused")
   public boolean filterRow(final SortedMap<byte [], Cell> columns) {
     return false;
   }
 
-  @SuppressWarnings("unused")
   public void validate(final byte [][] columns) {
     // does not do this
   }
diff --git a/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java b/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
index c2e6a9748da..8bb96b2156d 100644
--- a/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
+++ b/src/java/org/apache/hadoop/hbase/filter/RegExpRowFilter.java
@@ -86,7 +86,6 @@ public class RegExpRowFilter implements RowFilterInterface {
     this.setColumnFilters(columnFilter);
   }
 
-  @SuppressWarnings("unused")
   public void rowProcessed(boolean filtered, byte [] rowKey) {
     //doesn't care
   }
diff --git a/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java b/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java
index b7155a91cc5..3fd240274e5 100644
--- a/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java
+++ b/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java
@@ -61,7 +61,7 @@ public class StopRowFilter implements RowFilterInterface {
     return this.stopRowKey;
   }
 
-  public void validate(@SuppressWarnings("unused") final byte [][] columns) {
+  public void validate(final byte [][] columns) {
     // Doesn't filter columns
   }
 
@@ -69,7 +69,6 @@ public class StopRowFilter implements RowFilterInterface {
     // Nothing to reset
   }
 
-  @SuppressWarnings("unused")
   public void rowProcessed(boolean filtered, byte [] rowKey) {
     // Doesn't care
   }
@@ -96,9 +95,8 @@ public class StopRowFilter implements RowFilterInterface {
    * Because StopRowFilter does not examine column information, this method
    * defaults to calling the rowKey-only version of filter.
    */
-  public boolean filterColumn(@SuppressWarnings("unused") final byte [] rowKey,
-    @SuppressWarnings("unused") final byte [] colKey,
-    @SuppressWarnings("unused") final byte[] data) {
+  public boolean filterColumn(final byte [] rowKey, final byte [] colKey,
+    final byte[] data) {
     return filterRowKey(rowKey);
   }
 
@@ -106,8 +104,7 @@ public class StopRowFilter implements RowFilterInterface {
    * Because StopRowFilter does not examine column information, this method
    * defaults to calling filterAllRemaining().
    */
-  public boolean filterRow(@SuppressWarnings("unused")
-    final SortedMap<byte [], Cell> columns) {
+  public boolean filterRow(final SortedMap<byte [], Cell> columns) {
     return filterAllRemaining();
   }
diff --git a/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java b/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java
index d3fa9563a1d..5854670a4d3 100644
--- a/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java
+++ b/src/java/org/apache/hadoop/hbase/io/BlockFSInputStream.java
@@ -94,6 +94,8 @@ public class BlockFSInputStream extends FSInputStream {
     // A memory-sensitive map that has soft references to values
     this.blocks = new SoftValueMap<Long, byte []>() {
       private long hits, misses;
+
+      @Override
       public byte [] get(Object key) {
         byte [] value = super.get(key);
         if (value == null) {
@@ -140,7 +142,6 @@ public class BlockFSInputStream extends FSInputStream {
   }
 
   @Override
-  @SuppressWarnings("unused")
   public synchronized boolean seekToNewSource(long targetPos)
   throws IOException {
     return false;
@@ -234,7 +235,6 @@ public class BlockFSInputStream extends FSInputStream {
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void mark(int readLimit) {
     // Do nothing
   }
diff --git a/src/java/org/apache/hadoop/hbase/io/BloomFilterMapFile.java b/src/java/org/apache/hadoop/hbase/io/BloomFilterMapFile.java
index e755fd6aeca..af3c6de4321 100644
--- a/src/java/org/apache/hadoop/hbase/io/BloomFilterMapFile.java
+++ b/src/java/org/apache/hadoop/hbase/io/BloomFilterMapFile.java
@@ -42,8 +42,11 @@ import org.onelab.filter.Key;
  * tested first against bloom filter. Keys are HStoreKey. If passed bloom
  * filter is null, just passes invocation to parent.
  */
+// TODO should be fixed generic warnings from MapFile methods
+@SuppressWarnings("unchecked")
 public class BloomFilterMapFile extends HBaseMapFile {
-  private static final Log LOG = LogFactory.getLog(BloomFilterMapFile.class);
+  @SuppressWarnings("hiding")
+  static final Log LOG = LogFactory.getLog(BloomFilterMapFile.class);
   protected static final String BLOOMFILTER_FILE_NAME = "filter";
 
   public static class Reader extends HBaseReader {
@@ -148,7 +151,6 @@ public class BloomFilterMapFile extends HBaseMapFile {
    * @param hri
    * @throws IOException
    */
-  @SuppressWarnings("unchecked")
   public Writer(Configuration conf, FileSystem fs, String dirName,
       SequenceFile.CompressionType compression, final boolean filter,
       int nrows, final HRegionInfo hri)
diff --git a/src/java/org/apache/hadoop/hbase/io/HBaseMapFile.java b/src/java/org/apache/hadoop/hbase/io/HBaseMapFile.java
index 091f477426c..c627ec221b0 100644
--- a/src/java/org/apache/hadoop/hbase/io/HBaseMapFile.java
+++ b/src/java/org/apache/hadoop/hbase/io/HBaseMapFile.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.io;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -35,7 +33,8 @@ import org.apache.hadoop.io.Writable;
 /**
  * HBase customizations of MapFile.
  */
 public class HBaseMapFile extends MapFile {
-  private static final Log LOG = LogFactory.getLog(HBaseMapFile.class);
+  // TODO not used. remove?!
+  // private static final Log LOG = LogFactory.getLog(HBaseMapFile.class);
 
   /**
    * Values are instances of this class.
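The BlockFSInputStream hunk adds @Override inside the anonymous SoftValueMap subclass, which is cheap insurance: the compiler then verifies that get(Object) really overrides the superclass method, so a signature typo becomes a build error instead of a silent overload. The same hit/miss instrumentation idiom over a plain HashMap, as a self-contained sketch (the cache shape is invented; SoftValueMap is HBase's soft-reference map):

import java.util.HashMap;
import java.util.Map;

public class CountingCacheSketch {
  private final Map<Long, byte[]> blocks = new HashMap<Long, byte[]>() {
    private long hits, misses; // counters kept for periodic logging, as in the original

    @Override // fails to compile if the signature drifts from Map.get
    public byte[] get(Object key) {
      byte[] value = super.get(key);
      if (value == null) {
        misses++;
      } else {
        hits++;
      }
      return value;
    }
  };
}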
diff --git a/src/java/org/apache/hadoop/hbase/io/HalfMapFileReader.java b/src/java/org/apache/hadoop/hbase/io/HalfMapFileReader.java
index 7ae9447084f..e1d36c9cb4b 100644
--- a/src/java/org/apache/hadoop/hbase/io/HalfMapFileReader.java
+++ b/src/java/org/apache/hadoop/hbase/io/HalfMapFileReader.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.io.WritableComparable;
  *
  * <p>This file is not splitable. Calls to {@link #midKey()} return null.
  */
+//TODO should be fixed generic warnings from MapFile methods
 public class HalfMapFileReader extends BloomFilterMapFile.Reader {
   private final boolean top;
   private final HStoreKey midkey;
@@ -76,7 +77,6 @@ public class HalfMapFileReader extends BloomFilterMapFile.Reader {
    * @param hri
    * @throws IOException
    */
-  @SuppressWarnings("unchecked")
   public HalfMapFileReader(final FileSystem fs, final String dirName,
       final Configuration conf, final Range r,
       final WritableComparable mk, final boolean filter,
@@ -164,7 +164,7 @@ public class HalfMapFileReader extends BloomFilterMapFile.Reader {
     return closest;
   }
 
-  @SuppressWarnings({"unused", "unchecked"})
+  @SuppressWarnings("unchecked")
   @Override
   public synchronized WritableComparable midKey() throws IOException {
     // Returns null to indicate file is not splitable.
diff --git a/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java b/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
index d564c19a1fa..951b8f18b9d 100644
--- a/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
+++ b/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
@@ -45,7 +45,7 @@ import org.apache.hadoop.util.ReflectionUtils;
 * if passed a value type that it has not already been told about. Its been
 * primed with hbase Writables and byte []. Keys are always byte arrays.
 *
- * @param <K> key
+ * @param <K> key TODO: Parameter K is never used, could be removed.
 * @param <V> value Expects a Writable or byte [].
 */
 public class HbaseMapWritable <K, V>
 implements SortedMap<byte [], V>, Writable, Configurable {
@@ -164,13 +164,13 @@ implements SortedMap<byte [], V>, Writable, Configurable {
 
   // Writable
   /** @return the Class class for the specified id */
-  @SuppressWarnings({ "unchecked", "boxing" })
+  @SuppressWarnings("boxing")
   protected Class getClass(byte id) {
     return CODE_TO_CLASS.get(id);
   }
 
   /** @return the id for the specified Class */
-  @SuppressWarnings({ "unchecked", "boxing" })
+  @SuppressWarnings("boxing")
   protected byte getId(Class clazz) {
     Byte b = CLASS_TO_CODE.get(clazz);
     if (b == null) {
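HbaseMapWritable narrows @SuppressWarnings({ "unchecked", "boxing" }) to just "boxing", which is the right direction: keep each suppression as specific as possible so a newly introduced unchecked operation still surfaces. (The "boxing" key is an Eclipse-compiler warning; javac has no equivalent lint.) The boxing in question is the implicit byte-to-Byte conversion at the map lookup, roughly as in this hedged sketch:

import java.util.HashMap;
import java.util.Map;

public class CodeLookupSketch {
  // Invented stand-in for the real Class-to-code tables.
  private static final Map<Byte, Class<?>> CODE_TO_CLASS =
      new HashMap<Byte, Class<?>>();

  @SuppressWarnings("boxing") // 'id' is auto-boxed from byte to Byte here
  protected Class<?> getClass(byte id) {
    return CODE_TO_CLASS.get(id);
  }
}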
diff --git a/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java b/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
index 23c4f4d3d60..73b0b01d817 100644
--- a/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
+++ b/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.RowFilterSet;
 import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.ObjectWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
@@ -243,51 +242,54 @@ public class HbaseObjectWritable implements Writable, Configurable {
                                  Configuration conf)
   throws IOException {
 
-    if (instance == null) {                       // null
-      instance = new NullInstance(declaredClass, conf);
-      declaredClass = Writable.class;
+    Object instanceObj = instance;
+    Class declClass = declaredClass;
+
+    if (instanceObj == null) {                       // null
+      instanceObj = new NullInstance(declClass, conf);
+      declClass = Writable.class;
     }
-    writeClassCode(out, declaredClass);
-    if (declaredClass.isArray()) {                // array
+    writeClassCode(out, declClass);
+    if (declClass.isArray()) {                // array
       // If bytearray, just dump it out -- avoid the recursion and
       // byte-at-a-time we were previously doing.
-      if (declaredClass.equals(byte [].class)) {
-        Bytes.writeByteArray(out, (byte [])instance);
+      if (declClass.equals(byte [].class)) {
+        Bytes.writeByteArray(out, (byte [])instanceObj);
       } else {
-        int length = Array.getLength(instance);
+        int length = Array.getLength(instanceObj);
         out.writeInt(length);
         for (int i = 0; i < length; i++) {
-          writeObject(out, Array.get(instance, i),
-            declaredClass.getComponentType(), conf);
+          writeObject(out, Array.get(instanceObj, i),
+            declClass.getComponentType(), conf);
         }
       }
-    } else if (declaredClass == String.class) {   // String
-      Text.writeString(out, (String)instance);
-    } else if (declaredClass.isPrimitive()) {     // primitive type
-      if (declaredClass == Boolean.TYPE) {        // boolean
-        out.writeBoolean(((Boolean)instance).booleanValue());
-      } else if (declaredClass == Character.TYPE) { // char
-        out.writeChar(((Character)instance).charValue());
-      } else if (declaredClass == Byte.TYPE) {    // byte
-        out.writeByte(((Byte)instance).byteValue());
-      } else if (declaredClass == Short.TYPE) {   // short
-        out.writeShort(((Short)instance).shortValue());
-      } else if (declaredClass == Integer.TYPE) { // int
-        out.writeInt(((Integer)instance).intValue());
-      } else if (declaredClass == Long.TYPE) {    // long
-        out.writeLong(((Long)instance).longValue());
-      } else if (declaredClass == Float.TYPE) {   // float
-        out.writeFloat(((Float)instance).floatValue());
-      } else if (declaredClass == Double.TYPE) {  // double
-        out.writeDouble(((Double)instance).doubleValue());
-      } else if (declaredClass == Void.TYPE) {    // void
+    } else if (declClass == String.class) {   // String
+      Text.writeString(out, (String)instanceObj);
+    } else if (declClass.isPrimitive()) {     // primitive type
+      if (declClass == Boolean.TYPE) {        // boolean
+        out.writeBoolean(((Boolean)instanceObj).booleanValue());
+      } else if (declClass == Character.TYPE) { // char
+        out.writeChar(((Character)instanceObj).charValue());
+      } else if (declClass == Byte.TYPE) {    // byte
+        out.writeByte(((Byte)instanceObj).byteValue());
+      } else if (declClass == Short.TYPE) {   // short
+        out.writeShort(((Short)instanceObj).shortValue());
+      } else if (declClass == Integer.TYPE) { // int
+        out.writeInt(((Integer)instanceObj).intValue());
+      } else if (declClass == Long.TYPE) {    // long
+        out.writeLong(((Long)instanceObj).longValue());
+      } else if (declClass == Float.TYPE) {   // float
+        out.writeFloat(((Float)instanceObj).floatValue());
+      } else if (declClass == Double.TYPE) {  // double
+        out.writeDouble(((Double)instanceObj).doubleValue());
+      } else if (declClass == Void.TYPE) {    // void
       } else {
-        throw new IllegalArgumentException("Not a primitive: "+declaredClass);
+        throw new IllegalArgumentException("Not a primitive: "+declClass);
       }
-    } else if (declaredClass.isEnum()) {         // enum
-      Text.writeString(out, ((Enum)instance).name());
-    } else if (Writable.class.isAssignableFrom(declaredClass)) { // Writable
-      Class c = instance.getClass();
+    } else if (declClass.isEnum()) {         // enum
+      Text.writeString(out, ((Enum)instanceObj).name());
+    } else if (Writable.class.isAssignableFrom(declClass)) { // Writable
+      Class c = instanceObj.getClass();
       Byte code = CLASS_TO_CODE.get(c);
       if (code == null) {
         out.writeByte(NOT_ENCODED);
@@ -295,9 +297,9 @@ public class HbaseObjectWritable implements Writable, Configurable {
       } else {
         writeClassCode(out, c);
       }
-      ((Writable)instance).write(out);
+      ((Writable)instanceObj).write(out);
     } else {
-      throw new IOException("Can't write: "+instance+" as "+declaredClass);
+      throw new IOException("Can't write: "+instanceObj+" as "+declClass);
     }
   }
diff --git a/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java b/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
index a5402b587ae..7caff3779e3 100644
--- a/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
+++ b/src/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
@@ -36,7 +36,7 @@ import org.apache.hadoop.io.WritableComparator;
 * capacity as {@link org.apache.hadoop.io.BytesWritable} does. Hence its
 * comparatively 'immutable'.
 */
-public class ImmutableBytesWritable implements WritableComparable {
+public class ImmutableBytesWritable implements WritableComparable<ImmutableBytesWritable> {
   private byte[] bytes;
 
   /**
@@ -129,8 +129,8 @@
    * @return Positive if left is bigger than right, 0 if they are equal, and
    * negative if left is smaller than right.
    */
-  public int compareTo(Object right_obj) {
-    return compareTo(((ImmutableBytesWritable)right_obj).get());
+  public int compareTo(ImmutableBytesWritable right_obj) {
+    return compareTo(right_obj.get());
   }
 
   /**
@@ -153,7 +153,7 @@
       return compareTo((byte [])right_obj) == 0;
     }
     if (right_obj instanceof ImmutableBytesWritable) {
-      return compareTo(right_obj) == 0;
+      return compareTo((ImmutableBytesWritable)right_obj) == 0;
     }
     return false;
   }
diff --git a/src/java/org/apache/hadoop/hbase/io/RowResult.java b/src/java/org/apache/hadoop/hbase/io/RowResult.java
index 8b9e87c73cb..bbc83e71265 100644
--- a/src/java/org/apache/hadoop/hbase/io/RowResult.java
+++ b/src/java/org/apache/hadoop/hbase/io/RowResult.java
@@ -71,13 +71,13 @@ public class RowResult implements Writable, SortedMap<byte [], Cell> {
 
   // Map interface
   //
-  public Cell put(@SuppressWarnings("unused") byte [] key,
-    @SuppressWarnings("unused") Cell value) {
+  public Cell put(byte [] key,
+    Cell value) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
   @SuppressWarnings("unchecked")
-  public void putAll(@SuppressWarnings("unused") Map map) {
+  public void putAll(Map map) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
@@ -85,7 +85,7 @@
     return this.cells.get(key);
   }
 
-  public Cell remove(@SuppressWarnings("unused") Object key) {
+  public Cell remove(Object key) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
@@ -97,7 +97,7 @@
     return cells.containsKey(Bytes.toBytes(key));
   }
 
-  public boolean containsValue(@SuppressWarnings("unused") Object value) {
+  public boolean containsValue(Object value) {
     throw new UnsupportedOperationException("Don't support containsValue!");
  }
 
@@ -188,7 +188,7 @@
       this.cell = cell;
     }
 
-    public Cell setValue(@SuppressWarnings("unused") Cell c) {
+    public Cell setValue(Cell c) {
       throw new UnsupportedOperationException("RowResult is read-only!");
     }