HBASE-876 There are a large number of Java warnings in HBase

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@735946 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2009-01-20 06:32:36 +00:00
parent 4b49a9c162
commit 781b10724f
25 changed files with 102 additions and 122 deletions

View File

@@ -12,8 +12,8 @@ Release 0.20.0 - Unreleased
               (Samuel Guo via Stack)
   HBASE-1130  PrefixRowFilter (Michael Gottesman via Stack)
   HBASE-1139  Update Clover in build.xml
-  HBASE-876   There are a large number of Java warnings in HBase; part 1
-              (Evgeny Ryabitskiy via Stack)
+  HBASE-876   There are a large number of Java warnings in HBase; part 1,
+              part 2, and part 3 (Evgeny Ryabitskiy via Stack)
 
 Release 0.19.0 - Unreleased
   INCOMPATIBLE CHANGES

View File

@@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.RegionException;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
-import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.RowResult;
@@ -45,7 +44,6 @@ import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.util.Shell.ExitCodeException;
 
 /**
  * Provides administrative functions for HBase
@@ -133,9 +131,10 @@ public class HBaseAdmin {
   }
 
   private long getPauseTime(int tries) {
-    if (tries >= HConstants.RETRY_BACKOFF.length)
-      tries = HConstants.RETRY_BACKOFF.length - 1;
-    return this.pause * HConstants.RETRY_BACKOFF[tries];
+    int triesCount = tries;
+    if (triesCount >= HConstants.RETRY_BACKOFF.length)
+      triesCount = HConstants.RETRY_BACKOFF.length - 1;
+    return this.pause * HConstants.RETRY_BACKOFF[triesCount];
   }
 
   /**
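
Note: the getPauseTime() change above is the usual fix for the "assignment to method parameter" warning: the parameter is copied into a local, and only the copy is clamped and used as the index. A minimal, self-contained sketch of the same pattern (the backoff table and base pause below are illustrative values, not the real HConstants entries):

    public class PauseExample {
      // Illustrative multipliers; the real table is HConstants.RETRY_BACKOFF.
      private static final int[] RETRY_BACKOFF = {1, 1, 1, 2, 2, 4, 4, 8, 16, 32};
      private final long pause = 1000;   // assumed base pause in milliseconds

      long getPauseTime(final int tries) {
        int triesCount = tries;                    // local copy; the parameter is never reassigned
        if (triesCount >= RETRY_BACKOFF.length) {
          triesCount = RETRY_BACKOFF.length - 1;   // clamp to the last backoff slot
        }
        return this.pause * RETRY_BACKOFF[triesCount];
      }
    }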
@@ -534,8 +533,10 @@ public class HBaseAdmin {
     int xtraArgsCount = 1;
     Object [] newargs = new Object[len + xtraArgsCount];
     newargs[0] = regionname;
-    for (int i = 0; i < len; i++) {
-      newargs[i + xtraArgsCount] = args[i];
+    if(args != null) {
+      for (int i = 0; i < len; i++) {
+        newargs[i + xtraArgsCount] = args[i];
+      }
     }
     modifyTable(HConstants.META_TABLE_NAME, HConstants.MODIFY_CLOSE_REGION,
       newargs);
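
Note: the added if(args != null) guard covers callers that pass no extra arguments; len can already be zero in that case, but copying from a null array would still fail. The same guard in a standalone form (the helper and its names are hypothetical, for illustration only):

    // Hypothetical helper: reserve one slot for the region name, then copy the
    // optional arguments only when the caller actually supplied them.
    static Object[] prependRegionName(final byte[] regionname, final Object[] args) {
      final int len = (args == null) ? 0 : args.length;
      final int xtraArgsCount = 1;
      final Object[] newargs = new Object[len + xtraArgsCount];
      newargs[0] = regionname;
      if (args != null) {
        for (int i = 0; i < len; i++) {
          newargs[i + xtraArgsCount] = args[i];
        }
      }
      return newargs;
    }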

View File

@@ -875,20 +875,21 @@ public class HConnectionManager implements HConstants {
     getRegionLocationForRowWithRetries(byte[] tableName, byte[] rowKey,
       boolean reload)
     throws IOException {
+      boolean reloadFlag = reload;
       getMaster();
       List<Throwable> exceptions = new ArrayList<Throwable>();
       HRegionLocation location = null;
       int tries = 0;
       while (tries < numRetries) {
         try {
-          location = getRegionLocation(tableName, rowKey, reload);
+          location = getRegionLocation(tableName, rowKey, reloadFlag);
         } catch (Throwable t) {
           exceptions.add(t);
         }
         if (location != null) {
           break;
         }
-        reload = true;
+        reloadFlag = true;
         tries++;
         try {
           Thread.sleep(getPauseTime(tries));
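
Note: reload is copied into reloadFlag for the same parameter-assignment warning as in HBaseAdmin; the surrounding code is a bounded retry loop with a backoff pause between attempts. A self-contained sketch of that retry shape, with a Callable standing in for getRegionLocation() (all names and backoff values here are illustrative):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.Callable;

    public class RetryExample {
      private final int numRetries = 10;                       // assumed retry budget
      private final int[] retryBackoff = {1, 1, 2, 3, 5, 10};  // illustrative multipliers
      private final long pause = 1000;                         // base pause, milliseconds

      <T> T withRetries(final Callable<T> call) throws InterruptedException {
        final List<Throwable> exceptions = new ArrayList<Throwable>();
        T result = null;
        int tries = 0;
        while (tries < numRetries) {
          try {
            result = call.call();
          } catch (Throwable t) {
            exceptions.add(t);               // remember each failure for later reporting
          }
          if (result != null) {
            break;                           // success: stop retrying
          }
          tries++;
          // back off before the next attempt, clamped to the last multiplier
          Thread.sleep(pause * retryBackoff[Math.min(tries, retryBackoff.length - 1)]);
        }
        return result;
      }
    }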

View File

@@ -7,7 +7,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.hbase.util.Bytes; //TODO: remove
 
 /**
  * Scanner class that contains the <code>.META.</code> table scanning logic

View File

@@ -25,8 +25,10 @@ import org.apache.hadoop.hbase.DoNotRetryIOException;
 /**
  * Thrown when a scanner has timed out.
  */
-@SuppressWarnings("serial")
 public class ScannerTimeoutException extends DoNotRetryIOException {
+
+  private static final long serialVersionUID = 8788838690290688313L;
+
   /** default constructor */
   ScannerTimeoutException() {
     super();
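
Note: replacing the class-level @SuppressWarnings("serial") with an explicit serialVersionUID both removes the warning and pins the serialized form of the exception. The pattern in miniature, assuming nothing beyond java.io (class name and id value are examples only):

    import java.io.IOException;

    // Any Serializable class (exceptions are Serializable via Throwable) can declare
    // its own serialVersionUID instead of suppressing the "serial" warning.
    public class ExampleTimeoutException extends IOException {
      private static final long serialVersionUID = 1L;   // example value only

      public ExampleTimeoutException() {
        super();
      }

      public ExampleTimeoutException(final String message) {
        super(message);
      }
    }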

View File

@@ -15,55 +15,46 @@ public class UnmodifyableHColumnDescriptor extends HColumnDescriptor {
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(byte[] key, byte[] value) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(String key, String value) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMaxVersions(int maxVersions) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setInMemory(boolean inMemory) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setBlockCacheEnabled(boolean blockCacheEnabled) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMaxValueLength(int maxLength) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setTimeToLive(int timeToLive) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setCompressionType(CompressionType type) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMapFileIndexInterval(int interval) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }

View File

@@ -75,37 +75,31 @@ public class UnmodifyableHTableDescriptor extends HTableDescriptor {
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setInMemory(boolean inMemory) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setReadOnly(boolean readOnly) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(byte[] key, byte[] value) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(String key, String value) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMaxFileSize(long maxFileSize) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMemcacheFlushSize(int memcacheFlushSize) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
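
Note: the dropped @SuppressWarnings("unused") annotations added nothing here; the overriding setters must keep the parameters dictated by the superclass signatures, so suppressing an "unused" warning on them is redundant. The read-only wrapper idea itself, reduced to a toy example with hypothetical class names:

    // Hypothetical read-only wrapper: every inherited mutator is overridden to
    // fail fast instead of changing shared metadata.
    class Descriptor {
      private String value;
      public void setValue(final String value) { this.value = value; }
      public String getValue() { return value; }
    }

    class UnmodifyableDescriptor extends Descriptor {
      UnmodifyableDescriptor(final String value) {
        super.setValue(value);   // initialise once through the writable parent
      }

      @Override
      public void setValue(final String value) {
        throw new UnsupportedOperationException("Descriptor is read-only");
      }
    }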

View File

@@ -137,7 +137,6 @@ public class IndexSpecification implements Writable {
   }
 
   /** {@inheritDoc} */
-  @SuppressWarnings("unchecked")
   public void readFields(DataInput in) throws IOException {
     indexId = in.readUTF();
     int numIndexedCols = in.readInt();

View File

@@ -48,7 +48,7 @@ public class IndexedTable extends TransactionalTable {
   public static final byte[] INDEX_BASE_ROW_COLUMN = Bytes.add(
       INDEX_COL_FAMILY, Bytes.toBytes("ROW"));
 
-  private static final Log LOG = LogFactory.getLog(IndexedTable.class);
+  static final Log LOG = LogFactory.getLog(IndexedTable.class);
 
   private Map<String, HTable> indexIdToTable = new HashMap<String, HTable>();

View File

@@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MasterNotRunningException;
-import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.util.Bytes;

View File

@@ -22,9 +22,10 @@ package org.apache.hadoop.hbase.client.transactional;
 /** Thrown when a transaction cannot be committed.
  *
  */
-@SuppressWarnings("serial")
 public class CommitUnsuccessfulException extends Exception {
+
+  private static final long serialVersionUID = 7062921444531109202L;
+
   /** Default Constructor */
   public CommitUnsuccessfulException() {
     super();

View File

@@ -24,8 +24,9 @@ import org.apache.hadoop.hbase.DoNotRetryIOException;
 /**
  * Thrown if a region server is passed an unknown transaction id
  */
-@SuppressWarnings("serial")
 public class UnknownTransactionException extends DoNotRetryIOException {
+
+  private static final long serialVersionUID = 698575374929591099L;
 
   /** constructor */
   public UnknownTransactionException() {

View File

@@ -122,11 +122,11 @@ public class ColumnValueFilter implements RowFilterInterface {
     this.filterIfColumnMissing = filterIfColumnMissing;
   }
 
-  public boolean filterRowKey(@SuppressWarnings("unused") final byte[] rowKey) {
+  public boolean filterRowKey(final byte[] rowKey) {
     return false;
   }
 
-  public boolean filterColumn(@SuppressWarnings("unused") final byte[] rowKey,
+  public boolean filterColumn(final byte[] rowKey,
       final byte[] colKey, final byte[] data) {
     if (!filterIfColumnMissing) {
       return false; // Must filter on the whole row
@@ -201,12 +201,12 @@ public class ColumnValueFilter implements RowFilterInterface {
     // Nothing.
   }
 
-  public void rowProcessed(@SuppressWarnings("unused") final boolean filtered,
-      @SuppressWarnings("unused") final byte[] key) {
+  public void rowProcessed(final boolean filtered,
+      final byte[] key) {
     // Nothing
   }
 
-  public void validate(@SuppressWarnings("unused") final byte[][] columns) {
+  public void validate(final byte[][] columns) {
     // Nothing
   }

View File

@@ -61,7 +61,7 @@ public class PageRowFilter implements RowFilterInterface {
     this.pageSize = pageSize;
   }
 
-  public void validate(@SuppressWarnings("unused") final byte [][] columns) {
+  public void validate(final byte [][] columns) {
     // Doesn't filter columns
   }
 
@@ -70,7 +70,7 @@ public class PageRowFilter implements RowFilterInterface {
   }
 
   public void rowProcessed(boolean filtered,
-      @SuppressWarnings("unused") byte [] rowKey) {
+      byte [] rowKey) {
     if (!filtered) {
       this.rowsAccepted++;
     }
@@ -84,18 +84,17 @@ public class PageRowFilter implements RowFilterInterface {
     return this.rowsAccepted > this.pageSize;
   }
 
-  public boolean filterRowKey(@SuppressWarnings("unused") final byte [] r) {
+  public boolean filterRowKey(final byte [] r) {
     return filterAllRemaining();
   }
 
-  public boolean filterColumn(@SuppressWarnings("unused") final byte [] rowKey,
-      @SuppressWarnings("unused") final byte [] colKey,
-      @SuppressWarnings("unused") final byte[] data) {
+  public boolean filterColumn(final byte [] rowKey,
+      final byte [] colKey,
+      final byte[] data) {
     return filterAllRemaining();
   }
 
-  public boolean filterRow(@SuppressWarnings("unused")
-      final SortedMap<byte [], Cell> columns) {
+  public boolean filterRow(final SortedMap<byte [], Cell> columns) {
     return filterAllRemaining();
   }

View File

@@ -44,15 +44,12 @@ public class PrefixRowFilter implements RowFilterInterface {
    * Default Constructor, filters nothing. Required for RPC
    * deserialization
    */
-  @SuppressWarnings("unused")
   public PrefixRowFilter() { }
 
-  @SuppressWarnings("unused")
   public void reset() {
     // Nothing to reset
   }
 
-  @SuppressWarnings("unused")
   public void rowProcessed(boolean filtered, byte [] key) {
     // does not care
   }
@@ -76,18 +73,15 @@ public class PrefixRowFilter implements RowFilterInterface {
     return false;
   }
 
-  @SuppressWarnings("unused")
   public boolean filterColumn(final byte [] rowKey, final byte [] colunmName,
       final byte[] columnValue) {
     return false;
   }
 
-  @SuppressWarnings("unused")
   public boolean filterRow(final SortedMap<byte [], Cell> columns) {
     return false;
   }
 
-  @SuppressWarnings("unused")
   public void validate(final byte [][] columns) {
     // does not do this
   }

View File

@@ -86,7 +86,6 @@ public class RegExpRowFilter implements RowFilterInterface {
     this.setColumnFilters(columnFilter);
   }
 
-  @SuppressWarnings("unused")
   public void rowProcessed(boolean filtered, byte [] rowKey) {
     //doesn't care
   }

View File

@@ -61,7 +61,7 @@ public class StopRowFilter implements RowFilterInterface {
     return this.stopRowKey;
   }
 
-  public void validate(@SuppressWarnings("unused") final byte [][] columns) {
+  public void validate(final byte [][] columns) {
     // Doesn't filter columns
   }
 
@@ -69,7 +69,6 @@ public class StopRowFilter implements RowFilterInterface {
     // Nothing to reset
   }
 
-  @SuppressWarnings("unused")
   public void rowProcessed(boolean filtered, byte [] rowKey) {
     // Doesn't care
   }
@@ -96,9 +95,8 @@ public class StopRowFilter implements RowFilterInterface {
    * Because StopRowFilter does not examine column information, this method
    * defaults to calling the rowKey-only version of filter.
    */
-  public boolean filterColumn(@SuppressWarnings("unused") final byte [] rowKey,
-      @SuppressWarnings("unused") final byte [] colKey,
-      @SuppressWarnings("unused") final byte[] data) {
+  public boolean filterColumn(final byte [] rowKey, final byte [] colKey,
+      final byte[] data) {
     return filterRowKey(rowKey);
   }
 
@@ -106,8 +104,7 @@ public class StopRowFilter implements RowFilterInterface {
    * Because StopRowFilter does not examine column information, this method
    * defaults to calling filterAllRemaining().
    */
-  public boolean filterRow(@SuppressWarnings("unused")
-      final SortedMap<byte [], Cell> columns) {
+  public boolean filterRow(final SortedMap<byte [], Cell> columns) {
     return filterAllRemaining();
   }

View File

@@ -94,6 +94,8 @@ public class BlockFSInputStream extends FSInputStream {
     // A memory-sensitive map that has soft references to values
     this.blocks = new SoftValueMap<Long, byte []>() {
       private long hits, misses;
+
+      @Override
       public byte [] get(Object key) {
         byte [] value = super.get(key);
         if (value == null) {
@@ -140,7 +142,6 @@ public class BlockFSInputStream extends FSInputStream {
   }
 
   @Override
-  @SuppressWarnings("unused")
   public synchronized boolean seekToNewSource(long targetPos)
   throws IOException {
     return false;
@@ -234,7 +235,6 @@ public class BlockFSInputStream extends FSInputStream {
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void mark(int readLimit) {
     // Do nothing
   }

View File

@@ -42,8 +42,11 @@ import org.onelab.filter.Key;
  * tested first against bloom filter. Keys are HStoreKey. If passed bloom
  * filter is null, just passes invocation to parent.
  */
+// TODO should be fixed generic warnings from MapFile methods
+@SuppressWarnings("unchecked")
 public class BloomFilterMapFile extends HBaseMapFile {
-  private static final Log LOG = LogFactory.getLog(BloomFilterMapFile.class);
+  @SuppressWarnings("hiding")
+  static final Log LOG = LogFactory.getLog(BloomFilterMapFile.class);
   protected static final String BLOOMFILTER_FILE_NAME = "filter";
 
   public static class Reader extends HBaseReader {
@@ -148,7 +151,6 @@ public class BloomFilterMapFile extends HBaseMapFile {
     * @param hri
     * @throws IOException
     */
-    @SuppressWarnings("unchecked")
     public Writer(Configuration conf, FileSystem fs, String dirName,
         SequenceFile.CompressionType compression, final boolean filter,
         int nrows, final HRegionInfo hri)
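
Note: the LOG field above now carries @SuppressWarnings("hiding") because its name shadows another LOG visible to the class; scoping the suppression to the one declaration keeps the rest of the class under normal warning checks. A stripped-down illustration of the situation (class names are hypothetical; "hiding" is an Eclipse-specific warning token that javac simply ignores):

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    class BaseFile {
      static final Log LOG = LogFactory.getLog(BaseFile.class);
    }

    class BloomFile extends BaseFile {
      // This declaration hides BaseFile.LOG; suppressing on the field keeps the
      // warning quiet without silencing the whole class.
      @SuppressWarnings("hiding")
      static final Log LOG = LogFactory.getLog(BloomFile.class);
    }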

View File

@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.io;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -35,7 +33,8 @@ import org.apache.hadoop.io.Writable;
  * HBase customizations of MapFile.
  */
 public class HBaseMapFile extends MapFile {
-  private static final Log LOG = LogFactory.getLog(HBaseMapFile.class);
+  // TODO not used. remove?!
+  // private static final Log LOG = LogFactory.getLog(HBaseMapFile.class);
 
   /**
   * Values are instances of this class.

View File

@@ -43,6 +43,7 @@ import org.apache.hadoop.io.WritableComparable;
  *
  * <p>This file is not splitable. Calls to {@link #midKey()} return null.
  */
+//TODO should be fixed generic warnings from MapFile methods
 public class HalfMapFileReader extends BloomFilterMapFile.Reader {
   private final boolean top;
   private final HStoreKey midkey;
@@ -76,7 +77,6 @@ public class HalfMapFileReader extends BloomFilterMapFile.Reader {
    * @param hri
    * @throws IOException
    */
-  @SuppressWarnings("unchecked")
   public HalfMapFileReader(final FileSystem fs, final String dirName,
       final Configuration conf, final Range r,
       final WritableComparable<HStoreKey> mk, final boolean filter,
@@ -164,7 +164,7 @@ public class HalfMapFileReader extends BloomFilterMapFile.Reader {
     return closest;
   }
 
-  @SuppressWarnings({"unused", "unchecked"})
+  @SuppressWarnings("unchecked")
   @Override
   public synchronized WritableComparable midKey() throws IOException {
     // Returns null to indicate file is not splitable.

View File

@@ -45,7 +45,7 @@ import org.apache.hadoop.util.ReflectionUtils;
  * if passed a value type that it has not already been told about. Its been
  * primed with hbase Writables and byte []. Keys are always byte arrays.
  *
- * @param <byte []> key
+ * @param <byte []> key  TODO: Parameter K is never used, could be removed.
  * @param <V> value Expects a Writable or byte [].
  */
 public class HbaseMapWritable <K, V>
@@ -164,13 +164,13 @@ implements SortedMap<byte [], V>, Writable, Configurable {
   // Writable
 
   /** @return the Class class for the specified id */
-  @SuppressWarnings({ "unchecked", "boxing" })
+  @SuppressWarnings("boxing")
   protected Class<?> getClass(byte id) {
     return CODE_TO_CLASS.get(id);
   }
 
   /** @return the id for the specified Class */
-  @SuppressWarnings({ "unchecked", "boxing" })
+  @SuppressWarnings("boxing")
   protected byte getId(Class<?> clazz) {
     Byte b = CLASS_TO_CODE.get(clazz);
     if (b == null) {
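
Note: dropping "unchecked" and keeping only "boxing" narrows the suppression to the warning these two lookups actually trigger: the primitive byte id is auto-boxed to Byte for the map lookup, and the Byte result is auto-unboxed on return. A small sketch of that situation ("boxing", like "hiding", is an Eclipse-specific token; the class and field names below are hypothetical):

    import java.util.HashMap;
    import java.util.Map;

    class CodeRegistry {
      private final Map<Byte, Class<?>> codeToClass = new HashMap<Byte, Class<?>>();
      private final Map<Class<?>, Byte> classToCode = new HashMap<Class<?>, Byte>();

      @SuppressWarnings("boxing")
      protected Class<?> getClass(final byte id) {
        return codeToClass.get(id);   // byte auto-boxed to Byte for the lookup
      }

      @SuppressWarnings("boxing")
      protected byte getId(final Class<?> clazz) {
        final Byte b = classToCode.get(clazz);
        if (b == null) {
          throw new IllegalArgumentException("Unknown class: " + clazz);
        }
        return b;                     // Byte auto-unboxed to byte
      }
    }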

View File

@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.RowFilterSet;
 import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.ObjectWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
@@ -243,51 +242,54 @@ public class HbaseObjectWritable implements Writable, Configurable {
                                  Configuration conf)
   throws IOException {
-    if (instance == null) {                       // null
-      instance = new NullInstance(declaredClass, conf);
-      declaredClass = Writable.class;
+    Object instanceObj = instance;
+    Class declClass = declaredClass;
+
+    if (instanceObj == null) {                    // null
+      instanceObj = new NullInstance(declClass, conf);
+      declClass = Writable.class;
     }
-    writeClassCode(out, declaredClass);
-    if (declaredClass.isArray()) {                // array
+    writeClassCode(out, declClass);
+    if (declClass.isArray()) {                    // array
       // If bytearray, just dump it out -- avoid the recursion and
       // byte-at-a-time we were previously doing.
-      if (declaredClass.equals(byte [].class)) {
-        Bytes.writeByteArray(out, (byte [])instance);
+      if (declClass.equals(byte [].class)) {
+        Bytes.writeByteArray(out, (byte [])instanceObj);
       } else {
-        int length = Array.getLength(instance);
+        int length = Array.getLength(instanceObj);
         out.writeInt(length);
         for (int i = 0; i < length; i++) {
-          writeObject(out, Array.get(instance, i),
-                    declaredClass.getComponentType(), conf);
+          writeObject(out, Array.get(instanceObj, i),
+                    declClass.getComponentType(), conf);
         }
       }
-    } else if (declaredClass == String.class) {   // String
-      Text.writeString(out, (String)instance);
-    } else if (declaredClass.isPrimitive()) {     // primitive type
-      if (declaredClass == Boolean.TYPE) {        // boolean
-        out.writeBoolean(((Boolean)instance).booleanValue());
-      } else if (declaredClass == Character.TYPE) { // char
-        out.writeChar(((Character)instance).charValue());
-      } else if (declaredClass == Byte.TYPE) {    // byte
-        out.writeByte(((Byte)instance).byteValue());
-      } else if (declaredClass == Short.TYPE) {   // short
-        out.writeShort(((Short)instance).shortValue());
-      } else if (declaredClass == Integer.TYPE) { // int
-        out.writeInt(((Integer)instance).intValue());
-      } else if (declaredClass == Long.TYPE) {    // long
-        out.writeLong(((Long)instance).longValue());
-      } else if (declaredClass == Float.TYPE) {   // float
-        out.writeFloat(((Float)instance).floatValue());
-      } else if (declaredClass == Double.TYPE) {  // double
-        out.writeDouble(((Double)instance).doubleValue());
-      } else if (declaredClass == Void.TYPE) {    // void
+    } else if (declClass == String.class) {       // String
+      Text.writeString(out, (String)instanceObj);
+    } else if (declClass.isPrimitive()) {         // primitive type
+      if (declClass == Boolean.TYPE) {            // boolean
+        out.writeBoolean(((Boolean)instanceObj).booleanValue());
+      } else if (declClass == Character.TYPE) {   // char
+        out.writeChar(((Character)instanceObj).charValue());
+      } else if (declClass == Byte.TYPE) {        // byte
+        out.writeByte(((Byte)instanceObj).byteValue());
+      } else if (declClass == Short.TYPE) {       // short
+        out.writeShort(((Short)instanceObj).shortValue());
+      } else if (declClass == Integer.TYPE) {     // int
+        out.writeInt(((Integer)instanceObj).intValue());
+      } else if (declClass == Long.TYPE) {        // long
+        out.writeLong(((Long)instanceObj).longValue());
+      } else if (declClass == Float.TYPE) {       // float
+        out.writeFloat(((Float)instanceObj).floatValue());
+      } else if (declClass == Double.TYPE) {      // double
+        out.writeDouble(((Double)instanceObj).doubleValue());
+      } else if (declClass == Void.TYPE) {        // void
       } else {
-        throw new IllegalArgumentException("Not a primitive: "+declaredClass);
+        throw new IllegalArgumentException("Not a primitive: "+declClass);
       }
-    } else if (declaredClass.isEnum()) {          // enum
-      Text.writeString(out, ((Enum)instance).name());
-    } else if (Writable.class.isAssignableFrom(declaredClass)) { // Writable
-      Class <?> c = instance.getClass();
+    } else if (declClass.isEnum()) {              // enum
+      Text.writeString(out, ((Enum)instanceObj).name());
+    } else if (Writable.class.isAssignableFrom(declClass)) { // Writable
+      Class <?> c = instanceObj.getClass();
       Byte code = CLASS_TO_CODE.get(c);
       if (code == null) {
        out.writeByte(NOT_ENCODED);
@@ -295,9 +297,9 @@ public class HbaseObjectWritable implements Writable, Configurable {
       } else {
         writeClassCode(out, c);
       }
-      ((Writable)instance).write(out);
+      ((Writable)instanceObj).write(out);
     } else {
-      throw new IOException("Can't write: "+instance+" as "+declaredClass);
+      throw new IOException("Can't write: "+instanceObj+" as "+declClass);
     }
   }

View File

@@ -36,7 +36,7 @@ import org.apache.hadoop.io.WritableComparator;
  * capacity as {@link org.apache.hadoop.io.BytesWritable} does. Hence its
  * comparatively 'immutable'.
  */
-public class ImmutableBytesWritable implements WritableComparable {
+public class ImmutableBytesWritable implements WritableComparable<ImmutableBytesWritable> {
   private byte[] bytes;
 
   /**
@@ -129,8 +129,8 @@ public class ImmutableBytesWritable implements WritableComparable {
    * @return Positive if left is bigger than right, 0 if they are equal, and
    * negative if left is smaller than right.
    */
-  public int compareTo(Object right_obj) {
-    return compareTo(((ImmutableBytesWritable)right_obj).get());
+  public int compareTo(ImmutableBytesWritable right_obj) {
+    return compareTo(right_obj.get());
   }
 
   /**
@@ -153,7 +153,7 @@ public class ImmutableBytesWritable implements WritableComparable {
       return compareTo((byte [])right_obj) == 0;
     }
     if (right_obj instanceof ImmutableBytesWritable) {
-      return compareTo(right_obj) == 0;
+      return compareTo((ImmutableBytesWritable)right_obj) == 0;
     }
     return false;
   }
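
Note: parameterizing WritableComparable gives ImmutableBytesWritable a typed compareTo() and removes the raw-type warning, at the cost of an explicit cast where equals() delegates to it. The same move on a hypothetical little wrapper class, assuming only Hadoop's WritableComparable interface on the classpath:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import java.util.Arrays;

    import org.apache.hadoop.io.WritableComparable;

    // Hypothetical byte-array key showing the typed-comparable pattern.
    public class BytesKey implements WritableComparable<BytesKey> {
      private byte[] bytes = new byte[0];

      public byte[] get() { return bytes; }

      public void readFields(final DataInput in) throws IOException {
        bytes = new byte[in.readInt()];
        in.readFully(bytes);
      }

      public void write(final DataOutput out) throws IOException {
        out.writeInt(bytes.length);
        out.write(bytes);
      }

      // Typed compareTo: no Object parameter, no cast, no raw-type warning.
      public int compareTo(final BytesKey other) {
        return compareTo(other.get());
      }

      public int compareTo(final byte[] that) {
        final int len = Math.min(bytes.length, that.length);
        for (int i = 0; i < len; i++) {
          final int a = bytes[i] & 0xff, b = that[i] & 0xff;   // unsigned comparison
          if (a != b) {
            return a - b;
          }
        }
        return bytes.length - that.length;
      }

      @Override
      public boolean equals(final Object right) {
        return right instanceof BytesKey && compareTo((BytesKey) right) == 0;
      }

      @Override
      public int hashCode() {
        return Arrays.hashCode(bytes);
      }
    }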

View File

@@ -71,13 +71,13 @@ public class RowResult implements Writable, SortedMap<byte [], Cell> {
   //
   // Map interface
   //
-  public Cell put(@SuppressWarnings("unused") byte [] key,
-    @SuppressWarnings("unused") Cell value) {
+  public Cell put(byte [] key,
+    Cell value) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
   @SuppressWarnings("unchecked")
-  public void putAll(@SuppressWarnings("unused") Map map) {
+  public void putAll(Map map) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
@@ -85,7 +85,7 @@ public class RowResult implements Writable, SortedMap<byte [], Cell> {
     return this.cells.get(key);
   }
 
-  public Cell remove(@SuppressWarnings("unused") Object key) {
+  public Cell remove(Object key) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
@@ -97,7 +97,7 @@ public class RowResult implements Writable, SortedMap<byte [], Cell> {
     return cells.containsKey(Bytes.toBytes(key));
   }
 
-  public boolean containsValue(@SuppressWarnings("unused") Object value) {
+  public boolean containsValue(Object value) {
     throw new UnsupportedOperationException("Don't support containsValue!");
   }
 
@@ -188,7 +188,7 @@ public class RowResult implements Writable, SortedMap<byte [], Cell> {
     this.cell = cell;
   }
 
-  public Cell setValue(@SuppressWarnings("unused") Cell c) {
+  public Cell setValue(Cell c) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }