HBASE-5724 Row cache of KeyValue should be cleared in readFields().

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1310065 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2012-04-05 20:47:38 +00:00
parent 1f253c23fa
commit 7ab743986f
2 changed files with 25 additions and 1 deletion
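
To make the effect concrete, here is a small standalone sketch of the scenario the new test below exercises, using the Writable-based KeyValue API shown in this diff (the RowCacheDemo class is illustrative, not part of the change): a KeyValue instance that is reused for deserialization keeps handing out the previously cached row unless readFields() drops the cache.

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.WritableUtils;

// Illustrative demo class, not part of this commit.
public class RowCacheDemo {
  public static void main(String[] args) throws IOException {
    // Populate the row cache of a KeyValue by asking for its row once.
    KeyValue reused = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
        Bytes.toBytes("q"), 1L, Bytes.toBytes("v1"));
    reused.getRow();

    // Deserialize a different KeyValue into the same, reused instance.
    KeyValue other = new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("cf"),
        Bytes.toBytes("q"), 1L, Bytes.toBytes("v2"));
    reused.readFields(new DataInputStream(
        new ByteArrayInputStream(WritableUtils.toByteArray(other))));

    // With the cache cleared in readFields(), this prints "row2";
    // without the fix the stale cached "row1" would be returned.
    System.out.println(Bytes.toString(reused.getRow()));
  }
}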

KeyValue.java

@@ -27,7 +27,6 @@ import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import com.google.common.primitives.Longs;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -39,6 +38,8 @@ import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.Writable;
import com.google.common.primitives.Longs;
/**
 * An HBase Key/Value. This is the fundamental HBase Type.
 *
@@ -2235,6 +2236,7 @@ public class KeyValue implements Writable, HeapSize {
  // and it expects the length of the KeyValue to be explicitly passed
  // to it.
  public void readFields(int length, final DataInput in) throws IOException {
    this.rowCache = null;
    this.length = length;
    this.offset = 0;
    this.bytes = new byte[this.length];
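
The same rule applies to any reusable Writable that lazily caches a value derived from its serialized fields: every such cache must be invalidated when readFields() replaces the backing data. A minimal sketch of that pattern with a purely illustrative class (not HBase code):

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

// Illustrative only: a Writable with a lazily cached value derived from its bytes.
public class CachedPrefixWritable implements Writable {
  private byte[] bytes = new byte[0];
  private byte[] prefixCache; // computed from bytes on first use

  public CachedPrefixWritable() {
  }

  public CachedPrefixWritable(byte[] bytes) {
    this.bytes = bytes;
  }

  public byte[] getPrefix() {
    if (prefixCache == null) {
      int len = Math.min(4, bytes.length);
      prefixCache = new byte[len];
      System.arraycopy(bytes, 0, prefixCache, 0, len);
    }
    return prefixCache;
  }

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(bytes.length);
    out.write(bytes);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    prefixCache = null; // same idea as clearing rowCache above
    bytes = new byte[in.readInt()];
    in.readFully(bytes);
  }
}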

TestKeyValue.java

@@ -19,6 +19,8 @@
*/
package org.apache.hadoop.hbase;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Set;
import java.util.TreeSet;
@@ -31,6 +33,7 @@ import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValue.MetaComparator;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.WritableUtils;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
@@ -407,6 +410,25 @@ public class TestKeyValue extends TestCase {
        kv.toString().replaceAll("=[0-9]+", "=0"));
  }
  /**
   * The row cache is cleared and re-read for the new value
   *
   * @throws IOException
   */
  public void testReadFields() throws IOException {
    KeyValue kv1 = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf1"),
        Bytes.toBytes("qualifier1"), 12345L, Bytes.toBytes("value1"));
    kv1.getRow(); // set row cache of kv1
    KeyValue kv2 = new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("cf2"),
        Bytes.toBytes("qualifier2"), 12345L, Bytes.toBytes("value2"));
    kv1.readFields(new DataInputStream(new ByteArrayInputStream(WritableUtils
        .toByteArray(kv2))));
    // check equality
    assertEquals(kv1, kv2);
    // check cache state (getRow() returns the cached value if the cache is set)
    assertTrue(Bytes.equals(kv1.getRow(), kv2.getRow()));
  }
  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
      new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();