HBASE-876 There are a large number of Java warnings in HBase
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@735946 13f79535-47bb-0310-9956-ffa450edef68
parent 4b49a9c162
commit 781b10724f
@@ -12,8 +12,8 @@ Release 0.20.0 - Unreleased
              (Samuel Guo via Stack)
   HBASE-1130 PrefixRowFilter (Michael Gottesman via Stack)
   HBASE-1139 Update Clover in build.xml
-  HBASE-876  There are a large number of Java warnings in HBase; part 1
-             (Evgeny Ryabitskiy via Stack)
+  HBASE-876  There are a large number of Java warnings in HBase; part 1,
+             part 2, and part 3 (Evgeny Ryabitskiy via Stack)
 
 Release 0.19.0 - Unreleased
   INCOMPATIBLE CHANGES
@@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.RegionException;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.io.Cell;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.io.RowResult;
@@ -45,7 +44,6 @@ import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.util.Shell.ExitCodeException;

/**
 * Provides administrative functions for HBase
@@ -133,9 +131,10 @@ public class HBaseAdmin {
   }
 
   private long getPauseTime(int tries) {
-    if (tries >= HConstants.RETRY_BACKOFF.length)
-      tries = HConstants.RETRY_BACKOFF.length - 1;
-    return this.pause * HConstants.RETRY_BACKOFF[tries];
+    int triesCount = tries;
+    if (triesCount >= HConstants.RETRY_BACKOFF.length)
+      triesCount = HConstants.RETRY_BACKOFF.length - 1;
+    return this.pause * HConstants.RETRY_BACKOFF[triesCount];
   }
 
   /**
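Note: this hunk is the commit's recurring fix for the Eclipse "parameter assignment" warning: rather than mutating the tries parameter, the method copies it into a local first. A minimal, self-contained sketch of the pattern (the backoff table below is a stand-in, not HBase's actual HConstants.RETRY_BACKOFF values):

    public class BackoffDemo {
      // Hypothetical backoff multipliers; HConstants.RETRY_BACKOFF plays this role.
      private static final int[] RETRY_BACKOFF = {1, 1, 1, 2, 2, 4, 4, 8};
      private final long pause = 1000; // base pause in milliseconds

      // Copying the parameter into a local keeps the parameter itself
      // unmodified, which silences the warning without changing behavior.
      long getPauseTime(int tries) {
        int triesCount = tries;
        if (triesCount >= RETRY_BACKOFF.length) {
          triesCount = RETRY_BACKOFF.length - 1;
        }
        return this.pause * RETRY_BACKOFF[triesCount];
      }
    }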
@@ -534,9 +533,11 @@ public class HBaseAdmin {
     int xtraArgsCount = 1;
     Object [] newargs = new Object[len + xtraArgsCount];
     newargs[0] = regionname;
-    for (int i = 0; i < len; i++) {
-      newargs[i + xtraArgsCount] = args[i];
+    if(args != null) {
+      for (int i = 0; i < len; i++) {
+        newargs[i + xtraArgsCount] = args[i];
+      }
     }
     modifyTable(HConstants.META_TABLE_NAME, HConstants.MODIFY_CLOSE_REGION,
       newargs);
   }
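Note: the new if(args != null) guard avoids dereferencing a null argument array when the caller supplies no extra arguments. A reduced sketch of the same copy-with-prefix idea; buildArgs and its parameter names are hypothetical, not HBase API:

    static Object[] buildArgs(Object regionname, Object[] args) {
      int len = (args == null) ? 0 : args.length;
      Object[] newargs = new Object[len + 1];
      newargs[0] = regionname;
      if (args != null) {
        // Copy the optional arguments after the region name.
        for (int i = 0; i < len; i++) {
          newargs[i + 1] = args[i];
        }
      }
      return newargs;
    }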
@@ -875,20 +875,21 @@ public class HConnectionManager implements HConstants {
     getRegionLocationForRowWithRetries(byte[] tableName, byte[] rowKey,
       boolean reload)
     throws IOException {
+      boolean reloadFlag = reload;
       getMaster();
       List<Throwable> exceptions = new ArrayList<Throwable>();
       HRegionLocation location = null;
       int tries = 0;
       while (tries < numRetries) {
         try {
-          location = getRegionLocation(tableName, rowKey, reload);
+          location = getRegionLocation(tableName, rowKey, reloadFlag);
         } catch (Throwable t) {
           exceptions.add(t);
         }
         if (location != null) {
           break;
         }
-        reload = true;
+        reloadFlag = true;
         tries++;
         try {
           Thread.sleep(getPauseTime(tries));
@@ -7,7 +7,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.hbase.util.Bytes; //TODO: remove
 
 /**
  * Scanner class that contains the <code>.META.</code> table scanning logic
@@ -25,8 +25,10 @@ import org.apache.hadoop.hbase.DoNotRetryIOException;
 /**
  * Thrown when a scanner has timed out.
  */
-@SuppressWarnings("serial")
 public class ScannerTimeoutException extends DoNotRetryIOException {
+
+  private static final long serialVersionUID = 8788838690290688313L;
+
   /** default constructor */
   ScannerTimeoutException() {
     super();
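Note: here and in the transactional exceptions below, a class-level @SuppressWarnings("serial") is replaced by an explicit serialVersionUID. Declaring the field is the stronger fix: the "serial" warning exists because a Serializable class without a declared id gets a compiler-generated one that can change on recompile and break deserialization. A minimal sketch with a hypothetical exception class:

    import java.io.IOException;

    public class DemoTimeoutException extends IOException {
      // Declaring the id removes the warning and pins the serialized form.
      private static final long serialVersionUID = 1L;

      public DemoTimeoutException() {
        super();
      }
    }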
@@ -15,55 +15,46 @@ public class UnmodifyableHColumnDescriptor extends HColumnDescriptor {
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(byte[] key, byte[] value) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(String key, String value) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMaxVersions(int maxVersions) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setInMemory(boolean inMemory) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setBlockCacheEnabled(boolean blockCacheEnabled) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMaxValueLength(int maxLength) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setTimeToLive(int timeToLive) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setCompressionType(CompressionType type) {
     throw new UnsupportedOperationException("HColumnDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMapFileIndexInterval(int interval) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
@@ -75,37 +75,31 @@ public class UnmodifyableHTableDescriptor extends HTableDescriptor {
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setInMemory(boolean inMemory) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setReadOnly(boolean readOnly) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(byte[] key, byte[] value) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setValue(String key, String value) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMaxFileSize(long maxFileSize) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void setMemcacheFlushSize(int memcacheFlushSize) {
     throw new UnsupportedOperationException("HTableDescriptor is read-only");
   }
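Note: the two Unmodifyable* hunks drop @SuppressWarnings("unused") from overriding setters. Eclipse by default ignores unused parameters in overriding and implementing methods (the parameter list is dictated by the superclass), so the annotations were unnecessary and themselves read as warnings-suppression noise. A condensed sketch of the read-only wrapper pattern, with hypothetical class names standing in for HColumnDescriptor/HTableDescriptor:

    // Mutable base class.
    class Descriptor {
      private int maxVersions = 3;
      public void setMaxVersions(int maxVersions) { this.maxVersions = maxVersions; }
      public int getMaxVersions() { return maxVersions; }
    }

    // Read-only view: every setter is overridden to throw. No suppression
    // is needed on the overrides.
    class UnmodifyableDescriptor extends Descriptor {
      @Override
      public void setMaxVersions(int maxVersions) {
        throw new UnsupportedOperationException("Descriptor is read-only");
      }
    }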
@@ -137,7 +137,6 @@ public class IndexSpecification implements Writable {
   }
 
   /** {@inheritDoc} */
-  @SuppressWarnings("unchecked")
   public void readFields(DataInput in) throws IOException {
     indexId = in.readUTF();
     int numIndexedCols = in.readInt();
@@ -48,7 +48,7 @@ public class IndexedTable extends TransactionalTable {
   public static final byte[] INDEX_BASE_ROW_COLUMN = Bytes.add(
       INDEX_COL_FAMILY, Bytes.toBytes("ROW"));
 
-  private static final Log LOG = LogFactory.getLog(IndexedTable.class);
+  static final Log LOG = LogFactory.getLog(IndexedTable.class);
 
   private Map<String, HTable> indexIdToTable = new HashMap<String, HTable>();
 
@@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.util.Bytes;
@@ -22,9 +22,10 @@ package org.apache.hadoop.hbase.client.transactional;
 /** Thrown when a transaction cannot be committed.
  *
  */
-@SuppressWarnings("serial")
 public class CommitUnsuccessfulException extends Exception {
+
+  private static final long serialVersionUID = 7062921444531109202L;
 
   /** Default Constructor */
   public CommitUnsuccessfulException() {
     super();
@@ -24,8 +24,9 @@ import org.apache.hadoop.hbase.DoNotRetryIOException;
 /**
  * Thrown if a region server is passed an unknown transaction id
  */
-@SuppressWarnings("serial")
 public class UnknownTransactionException extends DoNotRetryIOException {
+
+  private static final long serialVersionUID = 698575374929591099L;
 
   /** constructor */
   public UnknownTransactionException() {
@@ -122,11 +122,11 @@ public class ColumnValueFilter implements RowFilterInterface {
     this.filterIfColumnMissing = filterIfColumnMissing;
   }
 
-  public boolean filterRowKey(@SuppressWarnings("unused") final byte[] rowKey) {
+  public boolean filterRowKey(final byte[] rowKey) {
     return false;
   }
 
-  public boolean filterColumn(@SuppressWarnings("unused") final byte[] rowKey,
+  public boolean filterColumn(final byte[] rowKey,
       final byte[] colKey, final byte[] data) {
     if (!filterIfColumnMissing) {
       return false; // Must filter on the whole row
@@ -201,12 +201,12 @@ public class ColumnValueFilter implements RowFilterInterface {
     // Nothing.
   }
 
-  public void rowProcessed(@SuppressWarnings("unused") final boolean filtered,
-      @SuppressWarnings("unused") final byte[] key) {
+  public void rowProcessed(final boolean filtered,
+      final byte[] key) {
     // Nothing
   }
 
-  public void validate(@SuppressWarnings("unused") final byte[][] columns) {
+  public void validate(final byte[][] columns) {
     // Nothing
   }
 
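Note: the filter hunks above and below make the same cleanup throughout the filter package. RowFilterInterface fixes the parameter lists, so implementations must accept parameters they deliberately ignore, and per-parameter @SuppressWarnings("unused") only clutters the signatures (Eclipse's default checks do not flag parameters required by an implemented interface). A stripped-down sketch; RowFilter and AcceptAllFilter are hypothetical names, not HBase classes:

    // Reduced analogue of RowFilterInterface.
    interface RowFilter {
      boolean filterRowKey(byte[] rowKey);
      void rowProcessed(boolean filtered, byte[] key);
    }

    class AcceptAllFilter implements RowFilter {
      public boolean filterRowKey(byte[] rowKey) {
        return false; // never filters, so rowKey is deliberately ignored
      }
      public void rowProcessed(boolean filtered, byte[] key) {
        // Nothing to record.
      }
    }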
@@ -61,7 +61,7 @@ public class PageRowFilter implements RowFilterInterface {
     this.pageSize = pageSize;
   }
 
-  public void validate(@SuppressWarnings("unused") final byte [][] columns) {
+  public void validate(final byte [][] columns) {
     // Doesn't filter columns
   }
 
@@ -70,7 +70,7 @@ public class PageRowFilter implements RowFilterInterface {
   }
 
   public void rowProcessed(boolean filtered,
-      @SuppressWarnings("unused") byte [] rowKey) {
+      byte [] rowKey) {
     if (!filtered) {
       this.rowsAccepted++;
     }
@@ -84,18 +84,17 @@ public class PageRowFilter implements RowFilterInterface {
     return this.rowsAccepted > this.pageSize;
   }
 
-  public boolean filterRowKey(@SuppressWarnings("unused") final byte [] r) {
+  public boolean filterRowKey(final byte [] r) {
     return filterAllRemaining();
   }
 
-  public boolean filterColumn(@SuppressWarnings("unused") final byte [] rowKey,
-      @SuppressWarnings("unused") final byte [] colKey,
-      @SuppressWarnings("unused") final byte[] data) {
+  public boolean filterColumn(final byte [] rowKey,
+      final byte [] colKey,
+      final byte[] data) {
     return filterAllRemaining();
   }
 
-  public boolean filterRow(@SuppressWarnings("unused")
-      final SortedMap<byte [], Cell> columns) {
+  public boolean filterRow(final SortedMap<byte [], Cell> columns) {
     return filterAllRemaining();
   }
 
@@ -44,15 +44,12 @@ public class PrefixRowFilter implements RowFilterInterface {
    * Default Constructor, filters nothing. Required for RPC
    * deserialization
    */
-  @SuppressWarnings("unused")
   public PrefixRowFilter() { }
 
-  @SuppressWarnings("unused")
   public void reset() {
     // Nothing to reset
   }
 
-  @SuppressWarnings("unused")
   public void rowProcessed(boolean filtered, byte [] key) {
     // does not care
   }
@@ -76,18 +73,15 @@ public class PrefixRowFilter implements RowFilterInterface {
     return false;
   }
 
-  @SuppressWarnings("unused")
   public boolean filterColumn(final byte [] rowKey, final byte [] colunmName,
       final byte[] columnValue) {
     return false;
   }
 
-  @SuppressWarnings("unused")
   public boolean filterRow(final SortedMap<byte [], Cell> columns) {
     return false;
   }
 
-  @SuppressWarnings("unused")
   public void validate(final byte [][] columns) {
     // does not do this
   }
@@ -86,7 +86,6 @@ public class RegExpRowFilter implements RowFilterInterface {
     this.setColumnFilters(columnFilter);
   }
 
-  @SuppressWarnings("unused")
   public void rowProcessed(boolean filtered, byte [] rowKey) {
     //doesn't care
   }
@@ -61,7 +61,7 @@ public class StopRowFilter implements RowFilterInterface {
     return this.stopRowKey;
   }
 
-  public void validate(@SuppressWarnings("unused") final byte [][] columns) {
+  public void validate(final byte [][] columns) {
     // Doesn't filter columns
   }
 
@@ -69,7 +69,6 @@ public class StopRowFilter implements RowFilterInterface {
     // Nothing to reset
   }
 
-  @SuppressWarnings("unused")
   public void rowProcessed(boolean filtered, byte [] rowKey) {
     // Doesn't care
   }
@@ -96,9 +95,8 @@ public class StopRowFilter implements RowFilterInterface {
    * Because StopRowFilter does not examine column information, this method
    * defaults to calling the rowKey-only version of filter.
    */
-  public boolean filterColumn(@SuppressWarnings("unused") final byte [] rowKey,
-      @SuppressWarnings("unused") final byte [] colKey,
-      @SuppressWarnings("unused") final byte[] data) {
+  public boolean filterColumn(final byte [] rowKey, final byte [] colKey,
+      final byte[] data) {
     return filterRowKey(rowKey);
   }
 
@@ -106,8 +104,7 @@ public class StopRowFilter implements RowFilterInterface {
    * Because StopRowFilter does not examine column information, this method
    * defaults to calling filterAllRemaining().
    */
-  public boolean filterRow(@SuppressWarnings("unused")
-      final SortedMap<byte [], Cell> columns) {
+  public boolean filterRow(final SortedMap<byte [], Cell> columns) {
     return filterAllRemaining();
   }
 
@@ -94,6 +94,8 @@ public class BlockFSInputStream extends FSInputStream {
     // A memory-sensitive map that has soft references to values
     this.blocks = new SoftValueMap<Long, byte []>() {
+      private long hits, misses;
+
       @Override
       public byte [] get(Object key) {
         byte [] value = super.get(key);
         if (value == null) {
@@ -140,7 +142,6 @@ public class BlockFSInputStream extends FSInputStream {
   }
 
   @Override
-  @SuppressWarnings("unused")
   public synchronized boolean seekToNewSource(long targetPos)
   throws IOException {
     return false;
@@ -234,7 +235,6 @@ public class BlockFSInputStream extends FSInputStream {
   }
 
   @Override
-  @SuppressWarnings("unused")
   public void mark(int readLimit) {
     // Do nothing
   }
 
@@ -42,8 +42,11 @@ import org.onelab.filter.Key;
  * tested first against bloom filter. Keys are HStoreKey. If passed bloom
  * filter is null, just passes invocation to parent.
  */
+// TODO should be fixed generic warnings from MapFile methods
+@SuppressWarnings("unchecked")
 public class BloomFilterMapFile extends HBaseMapFile {
-  private static final Log LOG = LogFactory.getLog(BloomFilterMapFile.class);
+  @SuppressWarnings("hiding")
+  static final Log LOG = LogFactory.getLog(BloomFilterMapFile.class);
   protected static final String BLOOMFILTER_FILE_NAME = "filter";
 
   public static class Reader extends HBaseReader {
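Note: the added @SuppressWarnings("hiding") acknowledges that the subclass's LOG shadows a like-named field visible from its superclass hierarchy ("hiding", like several tokens in this commit, is Eclipse-specific; javac silently ignores tokens it does not recognize). A minimal sketch with hypothetical classes:

    class Base {
      static final String LOG = "base";
    }

    class Derived extends Base {
      // Shadows Base.LOG: unqualified references inside Derived now resolve
      // here. Eclipse reports a "hiding" warning unless suppressed.
      @SuppressWarnings("hiding")
      static final String LOG = "derived";
    }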
@@ -148,7 +151,6 @@ public class BloomFilterMapFile extends HBaseMapFile {
    * @param hri
    * @throws IOException
    */
-  @SuppressWarnings("unchecked")
   public Writer(Configuration conf, FileSystem fs, String dirName,
       SequenceFile.CompressionType compression, final boolean filter,
       int nrows, final HRegionInfo hri)
@@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.io;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -35,7 +33,8 @@ import org.apache.hadoop.io.Writable;
  * HBase customizations of MapFile.
  */
 public class HBaseMapFile extends MapFile {
-  private static final Log LOG = LogFactory.getLog(HBaseMapFile.class);
+  // TODO not used. remove?!
+  // private static final Log LOG = LogFactory.getLog(HBaseMapFile.class);
 
   /**
    * Values are instances of this class.
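Note: this commit shows two different cures for the "unused private member" warning. HBaseMapFile's never-referenced logger is commented out with a TODO, while IndexedTable's (earlier hunk) is widened from private to package-private, since the compiler only flags private members it can prove unused. A sketch of the second approach, with a hypothetical class name:

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    class IndexedTableSketch {
      // Package-private: unused-member analysis cannot see the whole package,
      // so the warning disappears even if no code in this class uses LOG.
      static final Log LOG = LogFactory.getLog(IndexedTableSketch.class);
    }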
@@ -43,6 +43,7 @@ import org.apache.hadoop.io.WritableComparable;
  *
  * <p>This file is not splitable. Calls to {@link #midKey()} return null.
  */
+//TODO should be fixed generic warnings from MapFile methods
 public class HalfMapFileReader extends BloomFilterMapFile.Reader {
   private final boolean top;
   private final HStoreKey midkey;
@@ -76,7 +77,6 @@ public class HalfMapFileReader extends BloomFilterMapFile.Reader {
    * @param hri
    * @throws IOException
    */
-  @SuppressWarnings("unchecked")
   public HalfMapFileReader(final FileSystem fs, final String dirName,
       final Configuration conf, final Range r,
       final WritableComparable<HStoreKey> mk, final boolean filter,
@@ -164,7 +164,7 @@ public class HalfMapFileReader extends BloomFilterMapFile.Reader {
     return closest;
   }
 
-  @SuppressWarnings({"unused", "unchecked"})
+  @SuppressWarnings("unchecked")
   @Override
   public synchronized WritableComparable midKey() throws IOException {
     // Returns null to indicate file is not splitable.
@@ -45,7 +45,7 @@ import org.apache.hadoop.util.ReflectionUtils;
  * if passed a value type that it has not already been told about. Its been
  * primed with hbase Writables and byte []. Keys are always byte arrays.
  *
- * @param <byte []> key
+ * @param <byte []> key TODO: Parameter K is never used, could be removed.
  * @param <V> value Expects a Writable or byte [].
  */
 public class HbaseMapWritable <K, V>
@@ -164,13 +164,13 @@ implements SortedMap<byte [], V>, Writable, Configurable {
   // Writable
 
   /** @return the Class class for the specified id */
-  @SuppressWarnings({ "unchecked", "boxing" })
+  @SuppressWarnings("boxing")
   protected Class<?> getClass(byte id) {
     return CODE_TO_CLASS.get(id);
   }
 
   /** @return the id for the specified Class */
-  @SuppressWarnings({ "unchecked", "boxing" })
+  @SuppressWarnings("boxing")
   protected byte getId(Class<?> clazz) {
     Byte b = CLASS_TO_CODE.get(clazz);
     if (b == null) {
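Note: the suppression narrows from { "unchecked", "boxing" } to just "boxing", presumably because the lookup maps gained proper generic types, leaving only the auto-boxing of the primitive byte key to warn about ("boxing" is another Eclipse-specific token). A sketch:

    import java.util.HashMap;
    import java.util.Map;

    class CodeRegistry {
      private static final Map<Byte, Class<?>> CODE_TO_CLASS =
        new HashMap<Byte, Class<?>>();

      // The byte argument is auto-boxed to Byte at the get() call; with the
      // map generified, only the "boxing" warning remains.
      @SuppressWarnings("boxing")
      static Class<?> getClassForCode(byte id) {
        return CODE_TO_CLASS.get(id);
      }
    }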
@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.filter.RowFilterInterface;
import org.apache.hadoop.hbase.filter.RowFilterSet;
import org.apache.hadoop.hbase.io.HbaseMapWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableFactories;
@@ -243,51 +242,54 @@ public class HbaseObjectWritable implements Writable, Configurable {
                                  Configuration conf)
   throws IOException {
 
-    if (instance == null) {                       // null
-      instance = new NullInstance(declaredClass, conf);
-      declaredClass = Writable.class;
+    Object instanceObj = instance;
+    Class declClass = declaredClass;
+
+    if (instanceObj == null) {                    // null
+      instanceObj = new NullInstance(declClass, conf);
+      declClass = Writable.class;
     }
-    writeClassCode(out, declaredClass);
-    if (declaredClass.isArray()) {                // array
+    writeClassCode(out, declClass);
+    if (declClass.isArray()) {                    // array
       // If bytearray, just dump it out -- avoid the recursion and
       // byte-at-a-time we were previously doing.
-      if (declaredClass.equals(byte [].class)) {
-        Bytes.writeByteArray(out, (byte [])instance);
+      if (declClass.equals(byte [].class)) {
+        Bytes.writeByteArray(out, (byte [])instanceObj);
       } else {
-        int length = Array.getLength(instance);
+        int length = Array.getLength(instanceObj);
         out.writeInt(length);
         for (int i = 0; i < length; i++) {
-          writeObject(out, Array.get(instance, i),
-            declaredClass.getComponentType(), conf);
+          writeObject(out, Array.get(instanceObj, i),
+            declClass.getComponentType(), conf);
         }
       }
-    } else if (declaredClass == String.class) {   // String
-      Text.writeString(out, (String)instance);
-    } else if (declaredClass.isPrimitive()) {     // primitive type
-      if (declaredClass == Boolean.TYPE) {        // boolean
-        out.writeBoolean(((Boolean)instance).booleanValue());
-      } else if (declaredClass == Character.TYPE) { // char
-        out.writeChar(((Character)instance).charValue());
-      } else if (declaredClass == Byte.TYPE) {    // byte
-        out.writeByte(((Byte)instance).byteValue());
-      } else if (declaredClass == Short.TYPE) {   // short
-        out.writeShort(((Short)instance).shortValue());
-      } else if (declaredClass == Integer.TYPE) { // int
-        out.writeInt(((Integer)instance).intValue());
-      } else if (declaredClass == Long.TYPE) {    // long
-        out.writeLong(((Long)instance).longValue());
-      } else if (declaredClass == Float.TYPE) {   // float
-        out.writeFloat(((Float)instance).floatValue());
-      } else if (declaredClass == Double.TYPE) {  // double
-        out.writeDouble(((Double)instance).doubleValue());
-      } else if (declaredClass == Void.TYPE) {    // void
+    } else if (declClass == String.class) {       // String
+      Text.writeString(out, (String)instanceObj);
+    } else if (declClass.isPrimitive()) {         // primitive type
+      if (declClass == Boolean.TYPE) {            // boolean
+        out.writeBoolean(((Boolean)instanceObj).booleanValue());
+      } else if (declClass == Character.TYPE) {   // char
+        out.writeChar(((Character)instanceObj).charValue());
+      } else if (declClass == Byte.TYPE) {        // byte
+        out.writeByte(((Byte)instanceObj).byteValue());
+      } else if (declClass == Short.TYPE) {       // short
+        out.writeShort(((Short)instanceObj).shortValue());
+      } else if (declClass == Integer.TYPE) {     // int
+        out.writeInt(((Integer)instanceObj).intValue());
+      } else if (declClass == Long.TYPE) {        // long
+        out.writeLong(((Long)instanceObj).longValue());
+      } else if (declClass == Float.TYPE) {       // float
+        out.writeFloat(((Float)instanceObj).floatValue());
+      } else if (declClass == Double.TYPE) {      // double
+        out.writeDouble(((Double)instanceObj).doubleValue());
+      } else if (declClass == Void.TYPE) {        // void
       } else {
-        throw new IllegalArgumentException("Not a primitive: "+declaredClass);
+        throw new IllegalArgumentException("Not a primitive: "+declClass);
       }
-    } else if (declaredClass.isEnum()) {          // enum
-      Text.writeString(out, ((Enum)instance).name());
-    } else if (Writable.class.isAssignableFrom(declaredClass)) { // Writable
-      Class <?> c = instance.getClass();
+    } else if (declClass.isEnum()) {              // enum
+      Text.writeString(out, ((Enum)instanceObj).name());
+    } else if (Writable.class.isAssignableFrom(declClass)) { // Writable
+      Class <?> c = instanceObj.getClass();
       Byte code = CLASS_TO_CODE.get(c);
       if (code == null) {
         out.writeByte(NOT_ENCODED);
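Note: this large hunk is the parameter-copy pattern again, applied to writeObject: both instance and declaredClass were reassigned in the null case, so both are snapshotted into locals up front. (The new Class declClass is declared raw; a fully warning-free version would use Class<?>.) A compact sketch of the null-substitution idiom; NullSubstitution and NULL_MARKER are hypothetical simplifications of HbaseObjectWritable's NullInstance machinery:

    import java.io.DataOutput;
    import java.io.IOException;

    final class NullSubstitution {
      static final Object NULL_MARKER = new Object();

      static void writeObject(DataOutput out, Object instance,
          Class<?> declaredClass) throws IOException {
        // Copy parameters to locals before substituting for null, so the
        // parameters themselves are never reassigned.
        Object instanceObj = instance;
        Class<?> declClass = declaredClass;
        if (instanceObj == null) {
          instanceObj = NULL_MARKER;
          declClass = Object.class;
        }
        out.writeUTF(declClass.getName()); // simplified: HBase writes a class code
        out.writeUTF(String.valueOf(instanceObj));
      }
    }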
@@ -295,9 +297,9 @@ public class HbaseObjectWritable implements Writable, Configurable {
       } else {
         writeClassCode(out, c);
       }
-      ((Writable)instance).write(out);
+      ((Writable)instanceObj).write(out);
     } else {
-      throw new IOException("Can't write: "+instance+" as "+declaredClass);
+      throw new IOException("Can't write: "+instanceObj+" as "+declClass);
     }
   }
 
@@ -36,7 +36,7 @@ import org.apache.hadoop.io.WritableComparator;
  * capacity as {@link org.apache.hadoop.io.BytesWritable} does. Hence its
  * comparatively 'immutable'.
  */
-public class ImmutableBytesWritable implements WritableComparable {
+public class ImmutableBytesWritable implements WritableComparable<ImmutableBytesWritable> {
   private byte[] bytes;
 
   /**
@@ -129,8 +129,8 @@ public class ImmutableBytesWritable implements WritableComparable {
    * @return Positive if left is bigger than right, 0 if they are equal, and
    * negative if left is smaller than right.
    */
-  public int compareTo(Object right_obj) {
-    return compareTo(((ImmutableBytesWritable)right_obj).get());
+  public int compareTo(ImmutableBytesWritable right_obj) {
+    return compareTo(right_obj.get());
   }
 
   /**
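Note: generifying the class as WritableComparable<ImmutableBytesWritable> is what lets compareTo take a typed parameter and drop the unchecked cast; the equals hunk below then has to cast explicitly before calling it. A reduced sketch of the same move using plain Comparable (Key is a hypothetical class):

    class Key implements Comparable<Key> {
      private final byte[] bytes;

      Key(byte[] bytes) {
        this.bytes = bytes;
      }

      // Typed parameter: no cast, and passing a non-Key is now a compile-time
      // error instead of a runtime ClassCastException.
      public int compareTo(Key other) {
        return this.bytes.length - other.bytes.length; // length-only, for brevity
      }

      @Override
      public boolean equals(Object obj) {
        return obj instanceof Key && compareTo((Key) obj) == 0;
      }

      @Override
      public int hashCode() {
        return bytes.length;
      }
    }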
@@ -153,7 +153,7 @@ public class ImmutableBytesWritable implements WritableComparable {
       return compareTo((byte [])right_obj) == 0;
     }
     if (right_obj instanceof ImmutableBytesWritable) {
-      return compareTo(right_obj) == 0;
+      return compareTo((ImmutableBytesWritable)right_obj) == 0;
     }
     return false;
   }
@@ -71,13 +71,13 @@ public class RowResult implements Writable, SortedMap<byte [], Cell> {
   // Map interface
   //
 
-  public Cell put(@SuppressWarnings("unused") byte [] key,
-      @SuppressWarnings("unused") Cell value) {
+  public Cell put(byte [] key,
+      Cell value) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
   @SuppressWarnings("unchecked")
-  public void putAll(@SuppressWarnings("unused") Map map) {
+  public void putAll(Map map) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
@@ -85,7 +85,7 @@ public class RowResult implements Writable, SortedMap<byte [], Cell> {
     return this.cells.get(key);
   }
 
-  public Cell remove(@SuppressWarnings("unused") Object key) {
+  public Cell remove(Object key) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
@@ -97,7 +97,7 @@ public class RowResult implements Writable, SortedMap<byte [], Cell> {
     return cells.containsKey(Bytes.toBytes(key));
   }
 
-  public boolean containsValue(@SuppressWarnings("unused") Object value) {
+  public boolean containsValue(Object value) {
     throw new UnsupportedOperationException("Don't support containsValue!");
   }
 
@@ -188,7 +188,7 @@ public class RowResult implements Writable, SortedMap<byte [], Cell> {
     this.cell = cell;
   }
 
-  public Cell setValue(@SuppressWarnings("unused") Cell c) {
+  public Cell setValue(Cell c) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 