HBASE-1625 Adding check to Put.add(KeyValue kv), to see that it has the same row as when instantiated

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@792024 13f79535-47bb-0310-9956-ffa450edef68

Parent: 79e1eed60e
Commit: b00a7f271f
@@ -247,6 +247,8 @@ Release 0.20.0 - Unreleased
   HBASE-1616 Unit test of compacting referenced StoreFiles (Jon Gray via Stack)
   HBASE-1618 Investigate further into the MemStoreFlusher StoreFile limit
              (Jon Gray via Stack)
+  HBASE-1625 Adding check to Put.add(KeyValue kv), to see that it has the same
+             row as when instantiated (Erik Holstad via Stack)

 IMPROVEMENTS
   HBASE-1089 Add count of regions on filesystem to master UI; add percentage
|
|
|
@ -144,12 +144,21 @@ public class Put implements HeapSize, Writable, Comparable<Put> {
|
|||
* Add the specified KeyValue to this Put operation.
|
||||
* @param kv
|
||||
*/
|
||||
public Put add(KeyValue kv) {
|
||||
public Put add(KeyValue kv) throws IOException{
|
||||
byte [] family = kv.getFamily();
|
||||
List<KeyValue> list = familyMap.get(family);
|
||||
if(list == null) {
|
||||
list = new ArrayList<KeyValue>();
|
||||
}
|
||||
//Checking that the row of the kv is the same as the put
|
||||
int res = Bytes.compareTo(this.row, 0, row.length,
|
||||
kv.getBuffer(), kv.getRowOffset(), kv.getRowLength());
|
||||
if(res != 0) {
|
||||
throw new IOException("The row in the recently added KeyValue " +
|
||||
Bytes.toStringBinary(kv.getBuffer(), kv.getRowOffset(),
|
||||
kv.getRowLength()) + " doesn't match the original one " +
|
||||
Bytes.toStringBinary(this.row));
|
||||
}
|
||||
list.add(kv);
|
||||
familyMap.put(family, list);
|
||||
return this;
|
||||
|
|
|
@ -22,17 +22,11 @@ package org.apache.hadoop.hbase.client;
|
|||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.hadoop.hbase.HBaseClusterTestCase;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.KeyValue;
|
||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||
import org.apache.hadoop.hbase.io.Cell;
|
||||
import org.apache.hadoop.hbase.io.RowResult;
|
||||
import org.apache.hadoop.hbase.util.Bytes;
|
||||
|
||||
/**
|
||||
|
@ -82,7 +76,6 @@ public class TestPut extends HBaseClusterTestCase {
|
|||
* @throws IOException
|
||||
*/
|
||||
public void testPut() throws IOException {
|
||||
|
||||
Put put = new Put(row1);
|
||||
put.add(CONTENTS_FAMILY, null, value);
|
||||
table.put(put);
|
||||
|
@ -198,5 +191,29 @@ public class TestPut extends HBaseClusterTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testAddKeyValue() throws IOException {
|
||||
byte [] qualifier = Bytes.toBytes("qf1");
|
||||
Put put = new Put(row1);
|
||||
|
||||
//Adding KeyValue with the same row
|
||||
KeyValue kv = new KeyValue(row1, CONTENTS_FAMILY, qualifier, value);
|
||||
boolean ok = true;
|
||||
try {
|
||||
put.add(kv);
|
||||
} catch (IOException e) {
|
||||
ok = false;
|
||||
}
|
||||
assertEquals(true, ok);
|
||||
|
||||
//Adding KeyValue with the different row
|
||||
kv = new KeyValue(row2, CONTENTS_FAMILY, qualifier, value);
|
||||
ok = false;
|
||||
try {
|
||||
put.add(kv);
|
||||
} catch (IOException e) {
|
||||
ok = true;
|
||||
}
|
||||
assertEquals(true, ok);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue