HBASE-1380 Make KeyValue implement HeapSize and HBASE-1379 Make KeyValue implement Writable
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@774223 13f79535-47bb-0310-9956-ffa450edef68
commit 734bc8d117
parent 81c06b9299
@@ -232,6 +232,10 @@ Release 0.20.0 - Unreleased
    HBASE-1345  Remove distributed mode from MiniZooKeeper (Nitay Joffe via
                Stack)
    HBASE-1414  Add server status logging chore to ServerManager
+   HBASE-1379  Make KeyValue implement Writable
+               (Erik Holstad and Jon Gray via Stack)
+   HBASE-1380  Make KeyValue implement HeapSize
+               (Erik Holstad and Jon Gray via Stack)
 
  OPTIMIZATIONS
    HBASE-1412  Change values for delete column and column family in KeyValue
@@ -19,12 +19,16 @@
  */
 package org.apache.hadoop.hbase;
 
+import java.io.DataInput;
+import java.io.DataOutput;
 import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.io.HeapSize;
+import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.RawComparator;
 
 /**
@@ -45,10 +49,10 @@ import org.apache.hadoop.io.RawComparator;
  * Byte.MAX_SIZE, and column qualifier + key length must be < Integer.MAX_SIZE.
  * The column does not contain the family/qualifier delimiter.
  *
- * <p>TODO: Group Key-only compartors and operations into a Key class, just
+ * <p>TODO: Group Key-only comparators and operations into a Key class, just
  * for neatness sake, if can figure what to call it.
  */
-public class KeyValue {
+public class KeyValue implements Writable, HeapSize {
   static final Log LOG = LogFactory.getLog(KeyValue.class);
 
   /**
@@ -190,9 +194,12 @@ public class KeyValue {
   public static final KeyValue LOWESTKEY =
     new KeyValue(HConstants.EMPTY_BYTE_ARRAY, HConstants.LATEST_TIMESTAMP);
 
-  private final byte [] bytes;
-  private final int offset;
-  private final int length;
+  private byte [] bytes = null;
+  private int offset = 0;
+  private int length = 0;
+
+  /** Writable Constructor -- DO NOT USE */
+  public KeyValue() {}
 
   /**
    * Creates a KeyValue from the start of the specified byte array.
@@ -1420,4 +1427,22 @@
       return 0;
     }
   }
+
+  // HeapSize
+  public long heapSize() {
+    return this.length;
+  }
+
+  // Writable
+  public void readFields(final DataInput in) throws IOException {
+    this.length = in.readInt();
+    this.offset = 0;
+    this.bytes = new byte[this.length];
+    in.readFully(this.bytes, 0, this.length);
+  }
+
+  public void write(final DataOutput out) throws IOException {
+    out.writeInt(this.length);
+    out.write(this.bytes, this.offset, this.length);
+  }
 }
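For reviewers who want to see the new surface area in isolation: the Writable half means a KeyValue can be pushed through any DataOutput/DataInput pair, and the HeapSize half reports the size of the backing array. Below is a minimal sketch, not part of this patch; the class name, row/column literals, and stream setup are illustrative only and lean solely on constructors and the comparator already shown in this diff.

// Sketch only -- not part of this commit.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class KeyValueWritableSketch {
  public static void main(String[] args) throws IOException {
    KeyValue original = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family:qualifier"));

    // write(DataOutput) emits the serialized length, then the backing byte array.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    original.write(new DataOutputStream(baos));

    // readFields(DataInput) allocates a fresh backing array and fills it.
    KeyValue copy = new KeyValue(); // the new no-arg Writable constructor
    copy.readFields(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));

    // heapSize() reports the length of the backing array.
    System.out.println("heapSize=" + copy.heapSize());
    System.out.println("same=" + (KeyValue.COMPARATOR.compare(original, copy) == 0));
  }
}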
@@ -43,6 +43,15 @@ public class TestSerialization extends HBaseTestCase {
     super.tearDown();
   }
 
+  public void testKeyValue() throws Exception {
+    byte [] row = Bytes.toBytes(getName());
+    byte [] column = Bytes.toBytes(getName() + ":" + getName());
+    KeyValue original = new KeyValue(row, column);
+    byte [] bytes = Writables.getBytes(original);
+    KeyValue newone = (KeyValue)Writables.getWritable(bytes, new KeyValue());
+    assertTrue(KeyValue.COMPARATOR.compare(original, newone) == 0);
+  }
+
   public void testHbaseMapWritable() throws Exception {
     HbaseMapWritable<byte [], byte []> hmw =
       new HbaseMapWritable<byte[], byte[]>();
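One way the HeapSize contract can be put to work is rough memory accounting when batching KeyValues. The following is again a sketch, not code from this commit; the batch class, byte budget, and flush() hook are hypothetical, and only kv.heapSize() comes from the patch above.

// Hypothetical helper, for illustration only: accumulate KeyValues until an
// approximate byte budget is reached, using the new heapSize() accessor.
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.KeyValue;

public class KeyValueBatch {
  private final long maxHeapBytes;
  private long usedHeapBytes = 0;
  private final List<KeyValue> pending = new ArrayList<KeyValue>();

  public KeyValueBatch(long maxHeapBytes) {
    this.maxHeapBytes = maxHeapBytes;
  }

  public void add(KeyValue kv) {
    pending.add(kv);
    usedHeapBytes += kv.heapSize(); // per this patch, the backing array length
    if (usedHeapBytes >= maxHeapBytes) {
      flush();
    }
  }

  private void flush() {
    // Hypothetical: hand off the batch (e.g. write each KeyValue to a DataOutput)
    // and reset the accounting.
    pending.clear();
    usedHeapBytes = 0;
  }
}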