- * This interface should not include methods that allocate new byte[]'s such as those used in client
- * or debugging code. These users should use the methods found in the {@link CellUtil} class.
- * Currently for to minimize the impact of existing applications moving between 0.94 and 0.96, we
- * include the costly helper methods marked as deprecated.
- *
- *
* Cell implements Comparable<Cell> which is only meaningful when
* comparing to other keys in the
* same table. It uses CellComparator which does not work on the -ROOT- and hbase:meta tables.
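A quick illustration of that contract, as a hedged sketch: it assumes the
CellComparator.COMPARATOR singleton and the compare(Cell, Cell) method the
tests further down exercise, and both cells must come from the same user table.

    KeyValue a = new KeyValue(Bytes.toBytes("row-1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), 1L, Bytes.toBytes("v"));
    KeyValue b = new KeyValue(Bytes.toBytes("row-2"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), 1L, Bytes.toBytes("v"));
    // Ordered by row, then family, qualifier, timestamp and type. The result
    // is meaningless if a and b belong to different tables.
    assert CellComparator.COMPARATOR.compare(a, b) < 0;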
@@ -146,19 +140,7 @@ public interface Cell {
byte getTypeByte();
- //6) MvccVersion
-
- /**
- * @deprecated as of 1.0, use {@link Cell#getSequenceId()}
- *
- * Internal use only. A region-specific sequence ID given to each operation. It always exists for
- * cells in the memstore but is not retained forever. It may survive several flushes, but
- * generally becomes irrelevant after the cell's row is no longer involved in any operations that
- * require strict consistency.
- * @return mvccVersion (always >= 0 if exists), or 0 if it no longer exists
- */
- @Deprecated
- long getMvccVersion();
+ //6) SequenceId
/**
* A region-specific unique monotonically increasing sequence ID given to each Cell. It always
@@ -187,7 +169,7 @@ public interface Cell {
* @return Number of value bytes. Must be < valueArray.length - offset.
*/
int getValueLength();
-
+
/**
* @return the tags byte array
*/
@@ -202,44 +184,4 @@ public interface Cell {
* @return the total length of the tags in the Cell.
*/
int getTagsLength();
-
- /**
- * WARNING do not use, expensive. This gets an arraycopy of the cell's value.
- *
- * Added to ease transition from 0.94 -> 0.96.
- *
- * @deprecated as of 0.96, use {@link CellUtil#cloneValue(Cell)}
- */
- @Deprecated
- byte[] getValue();
-
- /**
- * WARNING do not use, expensive. This gets an arraycopy of the cell's family.
- *
- * Added to ease transition from 0.94 -> 0.96.
- *
- * @deprecated as of 0.96, use {@link CellUtil#cloneFamily(Cell)}
- */
- @Deprecated
- byte[] getFamily();
-
- /**
- * WARNING do not use, expensive. This gets an arraycopy of the cell's qualifier.
- *
- * Added to ease transition from 0.94 -> 0.96.
- *
- * @deprecated as of 0.96, use {@link CellUtil#cloneQualifier(Cell)}
- */
- @Deprecated
- byte[] getQualifier();
-
- /**
- * WARNING do not use, expensive. this gets an arraycopy of the cell's row.
- *
- * Added to ease transition from 0.94 -> 0.96.
- *
- * @deprecated as of 0.96, use {@link CellUtil#getRowByte(Cell, int)}
- */
- @Deprecated
- byte[] getRow();
}
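For call sites that lose the removed accessors, a minimal migration sketch;
both replacement patterns below are the ones named in the deprecation notes
above (the CellUtil clones allocate, the array/offset/length triple does not):

    // Owned copies -- acceptable client-side, avoid in server hot paths:
    byte[] row = CellUtil.cloneRow(cell);
    byte[] value = CellUtil.cloneValue(cell);
    // Zero-copy read straight from the backing array:
    String qualifier = Bytes.toStringBinary(cell.getQualifierArray(),
        cell.getQualifierOffset(), cell.getQualifierLength());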
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index a9b59e525bb..207f2752cff 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -1335,11 +1335,6 @@ public final class CellUtil {
return 0;
}
- @Override
- public long getMvccVersion() {
- return getSequenceId();
- }
-
@Override
public long getSequenceId() {
return 0;
@@ -1374,26 +1369,6 @@ public final class CellUtil {
public int getTagsLength() {
return 0;
}
-
- @Override
- public byte[] getValue() {
- return EMPTY_BYTE_ARRAY;
- }
-
- @Override
- public byte[] getFamily() {
- return EMPTY_BYTE_ARRAY;
- }
-
- @Override
- public byte[] getQualifier() {
- return EMPTY_BYTE_ARRAY;
- }
-
- @Override
- public byte[] getRow() {
- return EMPTY_BYTE_ARRAY;
- }
}
@InterfaceAudience.Private
@@ -1432,11 +1407,6 @@ public final class CellUtil {
public byte getTypeByte() {
return Type.Maximum.getCode();
}
-
- @Override
- public byte[] getRow() {
- return Bytes.copy(this.rowArray, this.roffset, this.rlength);
- }
}
@InterfaceAudience.Private
@@ -1488,16 +1458,6 @@ public final class CellUtil {
public int getQualifierLength() {
return this.qlength;
}
-
- @Override
- public byte[] getFamily() {
- return Bytes.copy(this.fArray, this.foffset, this.flength);
- }
-
- @Override
- public byte[] getQualifier() {
- return Bytes.copy(this.qArray, this.qoffset, this.qlength);
- }
}
@InterfaceAudience.Private
@@ -1553,11 +1513,6 @@ public final class CellUtil {
public byte getTypeByte() {
return Type.Minimum.getCode();
}
-
- @Override
- public byte[] getRow() {
- return Bytes.copy(this.rowArray, this.roffset, this.rlength);
- }
}
@InterfaceAudience.Private
@@ -1609,15 +1564,5 @@ public final class CellUtil {
public int getQualifierLength() {
return this.qlength;
}
-
- @Override
- public byte[] getFamily() {
- return Bytes.copy(this.fArray, this.foffset, this.flength);
- }
-
- @Override
- public byte[] getQualifier() {
- return Bytes.copy(this.qArray, this.qoffset, this.qlength);
- }
}
}
\ No newline at end of file
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 2fc79754077..8c739848fa3 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -50,12 +50,13 @@ import com.google.common.annotations.VisibleForTesting;
/**
* An HBase Key/Value. This is the fundamental HBase Type.
*
- * HBase applications and users should use the Cell interface and avoid directly using KeyValue
- * and member functions not defined in Cell.
+ * HBase applications and users should use the Cell interface and avoid directly using KeyValue and
+ * member functions not defined in Cell.
*
- * If being used client-side, the primary methods to access individual fields are {@link #getRow()},
- * {@link #getFamily()}, {@link #getQualifier()}, {@link #getTimestamp()}, and {@link #getValue()}.
- * These methods allocate new byte arrays and return copies. Avoid their use server-side.
+ * If being used client-side, the primary methods to access individual fields are
+ * {@link #getRowArray()}, {@link #getFamilyArray()}, {@link #getQualifierArray()},
+ * {@link #getTimestamp()}, and {@link #getValueArray()}. These methods allocate new byte arrays
+ * and return copies. Avoid their use server-side.
*
* Instances of this class are immutable. They do not implement Comparable but Comparators are
* provided. Comparators change with context, whether user table or a catalog table comparison. Its
@@ -64,23 +65,20 @@ import com.google.common.annotations.VisibleForTesting;
*
* KeyValue wraps a byte array and takes offsets and lengths into passed array at where to start
* interpreting the content as KeyValue. The KeyValue format inside a byte array is:
- * <keylength> <valuelength> <key> <value>
- * Key is further decomposed as:
- * <rowlength> <row> <columnfamilylength>
+ * <keylength> <valuelength> <key> <value> Key is further
+ * decomposed as: <rowlength> <row> <columnfamilylength>
* <columnfamily> <columnqualifier>
- * <timestamp> <keytype>
- * The rowlength maximum is Short.MAX_SIZE, column family length maximum
- * is Byte.MAX_SIZE, and column qualifier + key length must be <
- * Integer.MAX_SIZE. The column does not contain the family/qualifier delimiter,
- * {@link #COLUMN_FAMILY_DELIMITER}
+ * <timestamp> <keytype> The rowlength maximum is
+ * Short.MAX_SIZE, column family length maximum is Byte.MAX_SIZE, and
+ * column qualifier + key length must be < Integer.MAX_SIZE. The column does not
+ * contain the family/qualifier delimiter, {@link #COLUMN_FAMILY_DELIMITER}
* KeyValue can optionally contain Tags. When it contains tags, it is added in the byte array after
* the value part. The format for this part is: <tagslength><tagsbytes>.
* tagslength maximum is Short.MAX_SIZE. The tagsbytes
* contain one or more tags where as each tag is of the form
- * <taglength><tagtype><tagbytes>.
- * tagtype is one byte and
- * taglength maximum is Short.MAX_SIZE and it includes 1 byte type length
- * and actual tag bytes length.
+ * <taglength><tagtype><tagbytes>. tagtype is one byte
+ * and taglength maximum is Short.MAX_SIZE; it covers the 1-byte tag type
+ * plus the actual tag bytes.
*/
@InterfaceAudience.Private
public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
@@ -296,12 +294,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
/** Here be dragons **/
- // used to achieve atomic operations in the memstore.
- @Override
- public long getMvccVersion() {
- return this.getSequenceId();
- }
-
/**
* used to achieve atomic operations in the memstore.
*/
@@ -1172,9 +1164,11 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
*/
public Map<String, Object> toStringMap() {
Map<String, Object> stringMap = new HashMap<String, Object>();
- stringMap.put("row", Bytes.toStringBinary(getRow()));
- stringMap.put("family", Bytes.toStringBinary(getFamily()));
- stringMap.put("qualifier", Bytes.toStringBinary(getQualifier()));
+ stringMap.put("row", Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength()));
+ stringMap.put("family",
+ Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength()));
+ stringMap.put("qualifier",
+ Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(), getQualifierLength()));
stringMap.put("timestamp", getTimestamp());
stringMap.put("vlen", getValueLength());
List<Tag> tags = getTags();
@@ -1472,10 +1466,9 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
//---------------------------------------------------------------------------
/**
- * Do not use unless you have to. Used internally for compacting and testing.
- *
- * Use {@link #getRow()}, {@link #getFamily()}, {@link #getQualifier()}, and
- * {@link #getValue()} if accessing a KeyValue client-side.
+ * Do not use unless you have to. Used internally for compacting and testing. Use
+ * {@link #getRowArray()}, {@link #getFamilyArray()}, {@link #getQualifierArray()}, and
+ * {@link #getValueArray()} if accessing a KeyValue client-side.
* @return Copy of the key portion only.
*/
public byte [] getKey() {
@@ -1485,33 +1478,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
return key;
}
- /**
- * Returns value in a new byte array.
- * Primarily for use client-side. If server-side, use
- * {@link #getBuffer()} with appropriate offsets and lengths instead to
- * save on allocations.
- * @return Value in a new byte array.
- */
- @Override
- @Deprecated // use CellUtil.getValueArray()
- public byte [] getValue() {
- return CellUtil.cloneValue(this);
- }
-
- /**
- * Primarily for use client-side. Returns the row of this KeyValue in a new
- * byte array.
- *
- * If server-side, use {@link #getBuffer()} with appropriate offsets and
- * lengths instead.
- * @return Row in a new byte array.
- */
- @Override
- @Deprecated // use CellUtil.getRowArray()
- public byte [] getRow() {
- return CellUtil.cloneRow(this);
- }
-
/**
*
* @return Timestamp
@@ -1556,35 +1522,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
return KeyValue.isDelete(getType());
}
- /**
- * Primarily for use client-side. Returns the family of this KeyValue in a
- * new byte array.
- *
- * If server-side, use {@link #getBuffer()} with appropriate offsets and
- * lengths instead.
- * @return Returns family. Makes a copy.
- */
- @Override
- @Deprecated // use CellUtil.getFamilyArray
- public byte [] getFamily() {
- return CellUtil.cloneFamily(this);
- }
-
- /**
- * Primarily for use client-side. Returns the column qualifier of this
- * KeyValue in a new byte array.
- *
- * If server-side, use {@link #getBuffer()} with appropriate offsets and
- * lengths instead.
- * Use {@link #getBuffer()} with appropriate offsets and lengths instead.
- * @return Returns qualifier. Makes a copy.
- */
- @Override
- @Deprecated // use CellUtil.getQualifierArray
- public byte [] getQualifier() {
- return CellUtil.cloneQualifier(this);
- }
-
/**
* This returns the offset where the tag actually starts.
*/
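The reflowed Javadoc above pins down the serialized key layout; a sketch of the
length arithmetic it implies (field widths inferred from the documented format,
not from code in this diff):

    // <keylength:4> <valuelength:4> <key> <value>, where key =
    // <rowlength:2> <row> <familylength:1> <family> <qualifier>
    // <timestamp:8> <keytype:1>
    static int keyLength(int rowLen, int familyLen, int qualifierLen) {
      return 2 + rowLen + 1 + familyLen + qualifierLen + 8 + 1;
    }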
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java
index 50a409d8a6b..0de627a6451 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java
@@ -73,9 +73,9 @@ public class KeyValueTestUtil {
/**
* Checks whether KeyValues from kvCollection2 are contained in kvCollection1.
- *
+ *
* The comparison is made without distinguishing MVCC version of the KeyValues
- *
+ *
* @param kvCollection1
* @param kvCollection2
* @return true if KeyValues from kvCollection2 are contained in kvCollection1
@@ -91,7 +91,7 @@ public class KeyValueTestUtil {
}
return true;
}
-
+
public static List<KeyValue> rewindThenToList(final ByteBuffer bb,
final boolean includesMemstoreTS, final boolean useTags) {
bb.rewind();
@@ -161,15 +161,16 @@ public class KeyValueTestUtil {
}
protected static String getRowString(final KeyValue kv) {
- return Bytes.toStringBinary(kv.getRow());
+ return Bytes.toStringBinary(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength());
}
protected static String getFamilyString(final KeyValue kv) {
- return Bytes.toStringBinary(kv.getFamily());
+ return Bytes.toStringBinary(kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength());
}
protected static String getQualifierString(final KeyValue kv) {
- return Bytes.toStringBinary(kv.getQualifier());
+ return Bytes.toStringBinary(kv.getQualifierArray(), kv.getQualifierOffset(),
+ kv.getQualifierLength());
}
protected static String getTimestampString(final KeyValue kv) {
@@ -177,11 +178,11 @@ public class KeyValueTestUtil {
}
protected static String getTypeString(final KeyValue kv) {
- return KeyValue.Type.codeToType(kv.getType()).toString();
+ return KeyValue.Type.codeToType(kv.getTypeByte()).toString();
}
protected static String getValueString(final KeyValue kv) {
- return Bytes.toStringBinary(kv.getValue());
+ return Bytes.toStringBinary(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength());
}
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
index 74a26a27a6b..59519e0c641 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
@@ -81,7 +81,7 @@ public class KeyValueUtil {
public static int lengthWithMvccVersion(final KeyValue kv, final boolean includeMvccVersion) {
int length = kv.getLength();
if (includeMvccVersion) {
- length += WritableUtils.getVIntSize(kv.getMvccVersion());
+ length += WritableUtils.getVIntSize(kv.getSequenceId());
}
return length;
}
@@ -101,7 +101,7 @@ public class KeyValueUtil {
public static KeyValue copyToNewKeyValue(final Cell cell) {
byte[] bytes = copyToNewByteArray(cell);
KeyValue kvCell = new KeyValue(bytes, 0, bytes.length);
- kvCell.setSequenceId(cell.getMvccVersion());
+ kvCell.setSequenceId(cell.getSequenceId());
return kvCell;
}
@@ -173,9 +173,9 @@ public class KeyValueUtil {
bb.limit(bb.position() + kv.getLength());
bb.put(kv.getBuffer(), kv.getOffset(), kv.getLength());
if (includeMvccVersion) {
- int numMvccVersionBytes = WritableUtils.getVIntSize(kv.getMvccVersion());
+ int numMvccVersionBytes = WritableUtils.getVIntSize(kv.getSequenceId());
ByteBufferUtils.extendLimit(bb, numMvccVersionBytes);
- ByteBufferUtils.writeVLong(bb, kv.getMvccVersion());
+ ByteBufferUtils.writeVLong(bb, kv.getSequenceId());
}
}
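Both hunks above are pure renames; the serialized form is untouched, with the
sequence id still appended as a vint after the KeyValue bytes when requested. A
round-trip sketch under that assumption (ByteBufferUtils.readVLong is assumed
to exist alongside the writeVLong used above):

    ByteBuffer bb = ByteBuffer.allocate(kv.getLength() + 9); // vlong <= 9 bytes
    bb.put(kv.getBuffer(), kv.getOffset(), kv.getLength());
    ByteBufferUtils.writeVLong(bb, kv.getSequenceId());
    bb.flip();
    ByteBufferUtils.skip(bb, kv.getLength());   // step past the KeyValue body
    long seqId = ByteBufferUtils.readVLong(bb); // == kv.getSequenceId()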
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
index 2ed89f17f0f..a54c76eed8f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java
@@ -56,7 +56,7 @@ public class CellCodec implements Codec {
// Value
write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
// MvccVersion
- this.out.write(Bytes.toBytes(cell.getMvccVersion()));
+ this.out.write(Bytes.toBytes(cell.getSequenceId()));
}
/**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
index a614026c1a9..d79be175214 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
@@ -58,7 +58,7 @@ public class CellCodecWithTags implements Codec {
// Tags
write(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
// MvccVersion
- this.out.write(Bytes.toBytes(cell.getMvccVersion()));
+ this.out.write(Bytes.toBytes(cell.getSequenceId()));
}
/**
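Unlike the vint framing in KeyValueUtil, both codecs close each cell with the
raw 8-byte long produced by Bytes.toBytes(long). A hypothetical decode-side
counterpart, for illustration only:

    static long readSequenceId(DataInputStream in) throws IOException {
      byte[] buf = new byte[Bytes.SIZEOF_LONG];
      in.readFully(buf); // the trailing eight bytes written above
      return Bytes.toLong(buf);
    }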
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
index 5107271859d..03875dc3b6a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
@@ -24,13 +24,12 @@ import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.Streamable;
import org.apache.hadoop.hbase.SettableSequenceId;
+import org.apache.hadoop.hbase.Streamable;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.ByteBufferOutputStream;
import org.apache.hadoop.hbase.io.HeapSize;
@@ -256,11 +255,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
return currentKey.getTypeByte();
}
- @Override
- public long getMvccVersion() {
- return memstoreTS;
- }
-
@Override
public long getSequenceId() {
return memstoreTS;
@@ -302,30 +296,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
return tagsLength;
}
- @Override
- @Deprecated
- public byte[] getValue() {
- throw new UnsupportedOperationException("getValue() not supported");
- }
-
- @Override
- @Deprecated
- public byte[] getFamily() {
- throw new UnsupportedOperationException("getFamily() not supported");
- }
-
- @Override
- @Deprecated
- public byte[] getQualifier() {
- throw new UnsupportedOperationException("getQualifier() not supported");
- }
-
- @Override
- @Deprecated
- public byte[] getRow() {
- throw new UnsupportedOperationException("getRow() not supported");
- }
-
@Override
public String toString() {
return KeyValue.keyToString(this.keyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen="
@@ -334,7 +304,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
public Cell shallowCopy() {
return new ClonedSeekerState(currentBuffer, keyBuffer, currentKey.getRowLength(),
- currentKey.getFamilyOffset(), currentKey.getFamilyLength(), keyLength,
+ currentKey.getFamilyOffset(), currentKey.getFamilyLength(), keyLength,
currentKey.getQualifierOffset(), currentKey.getQualifierLength(),
currentKey.getTimestamp(), currentKey.getTypeByte(), valueLength, valueOffset,
memstoreTS, tagsOffset, tagsLength, tagCompressionContext, tagsBuffer);
@@ -342,9 +312,9 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
}
/**
- * Copies only the key part of the keybuffer by doing a deep copy and passes the
+ * Copies only the key part of the keybuffer by doing a deep copy and passes the
* seeker state members for taking a clone.
- * Note that the value byte[] part is still pointing to the currentBuffer and the
+ * Note that the value byte[] part still points into the currentBuffer and is
* represented by the valueOffset and valueLength
*/
// We return this as a Cell to the upper layers of read flow and might try setting a new SeqId
@@ -372,7 +342,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
private byte[] cloneTagsBuffer;
private long seqId;
private TagCompressionContext tagCompressionContext;
-
+
protected ClonedSeekerState(ByteBuffer currentBuffer, byte[] keyBuffer, short rowLength,
int familyOffset, byte familyLength, int keyLength, int qualOffset, int qualLength,
long timeStamp, byte typeByte, int valueLen, int valueOffset, long seqId,
@@ -455,12 +425,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
return typeByte;
}
- @Override
- @Deprecated
- public long getMvccVersion() {
- return getSequenceId();
- }
-
@Override
public long getSequenceId() {
return seqId;
@@ -502,30 +466,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
return tagsLength;
}
- @Override
- @Deprecated
- public byte[] getValue() {
- return CellUtil.cloneValue(this);
- }
-
- @Override
- @Deprecated
- public byte[] getFamily() {
- return CellUtil.cloneFamily(this);
- }
-
- @Override
- @Deprecated
- public byte[] getQualifier() {
- return CellUtil.cloneQualifier(this);
- }
-
- @Override
- @Deprecated
- public byte[] getRow() {
- return CellUtil.cloneRow(this);
- }
-
@Override
public String toString() {
return KeyValue.keyToString(this.keyOnlyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen="
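The ClonedSeekerState comment above captures the ownership split this encoder
relies on: key bytes are deep-copied because the seeker reuses its key buffer,
while value bytes remain a view into the shared block buffer. A schematic
sketch with hypothetical field names:

    final byte[] keyOnlyBytes;     // deep copy; survives seeker reuse
    final ByteBuffer blockBuffer;  // shared; offsets identify the value
    final int valueOffset, valueLength;

    byte[] copyValue() {
      byte[] v = new byte[valueLength];
      ByteBuffer dup = blockBuffer.duplicate(); // leave the shared position alone
      dup.position(valueOffset);
      dup.get(v);
      return v;
    }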
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
index 26b513c071d..fa98f70b885 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java
@@ -301,7 +301,7 @@ public class RedundantKVGenerator {
for (KeyValue kv : keyValues) {
totalSize += kv.getLength();
if (includesMemstoreTS) {
- totalSize += WritableUtils.getVIntSize(kv.getMvccVersion());
+ totalSize += WritableUtils.getVIntSize(kv.getSequenceId());
}
}
@@ -309,7 +309,7 @@ public class RedundantKVGenerator {
for (KeyValue kv : keyValues) {
result.put(kv.getBuffer(), kv.getOffset(), kv.getLength());
if (includesMemstoreTS) {
- ByteBufferUtils.writeVLong(result, kv.getMvccVersion());
+ ByteBufferUtils.writeVLong(result, kv.getSequenceId());
}
}
return result;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
index ed8f901783a..d9e76e6545d 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
@@ -18,7 +18,9 @@
package org.apache.hadoop.hbase;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.math.BigDecimal;
@@ -154,12 +156,6 @@ public class TestCellUtil {
return 0;
}
- @Override
- public long getMvccVersion() {
- // TODO Auto-generated method stub
- return 0;
- }
-
@Override
public byte[] getValueArray() {
// TODO Auto-generated method stub
@@ -190,30 +186,6 @@ public class TestCellUtil {
return 0;
}
- @Override
- public byte[] getValue() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public byte[] getFamily() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public byte[] getQualifier() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public byte[] getRow() {
- // TODO Auto-generated method stub
- return null;
- }
-
@Override
public long getSequenceId() {
// TODO Auto-generated method stub
@@ -592,11 +564,6 @@ public class TestCellUtil {
return KeyValue.Type.Put.getCode();
}
- @Override
- public long getMvccVersion() {
- return 0;
- }
-
@Override
public long getSequenceId() {
return 0;
@@ -639,26 +606,6 @@ public class TestCellUtil {
return tagsLen;
}
- @Override
- public byte[] getValue() {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public byte[] getFamily() {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public byte[] getQualifier() {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public byte[] getRow() {
- throw new UnsupportedOperationException();
- }
-
@Override
public ByteBuffer getRowByteBuffer() {
return this.buffer;
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
index 5cb61c39e50..cc1e511f52e 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
@@ -93,7 +93,8 @@ public class TestKeyValue extends TestCase {
private void check(final byte [] row, final byte [] family, byte [] qualifier,
final long timestamp, final byte [] value) {
KeyValue kv = new KeyValue(row, family, qualifier, timestamp, value);
- assertTrue(Bytes.compareTo(kv.getRow(), row) == 0);
+ assertTrue(Bytes.compareTo(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), row, 0,
+ row.length) == 0);
assertTrue(CellUtil.matchingColumn(kv, family, qualifier));
// Call toString to make sure it works.
LOG.info(kv.toString());
@@ -390,9 +391,10 @@ public class TestKeyValue extends TestCase {
// keys are still the same
assertTrue(kv1.equals(kv1ko));
// but values are not
- assertTrue(kv1ko.getValue().length == (useLen?Bytes.SIZEOF_INT:0));
+ assertTrue(kv1ko.getValueLength() == (useLen?Bytes.SIZEOF_INT:0));
if (useLen) {
- assertEquals(kv1.getValueLength(), Bytes.toInt(kv1ko.getValue()));
+ assertEquals(kv1.getValueLength(),
+ Bytes.toInt(kv1ko.getValueArray(), kv1ko.getValueOffset(), kv1ko.getValueLength()));
}
}
}
@@ -442,10 +444,14 @@ public class TestKeyValue extends TestCase {
KeyValue kv = new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, new Tag[] {
new Tag((byte) 1, metaValue1), new Tag((byte) 2, metaValue2) });
assertTrue(kv.getTagsLength() > 0);
- assertTrue(Bytes.equals(kv.getRow(), row));
- assertTrue(Bytes.equals(kv.getFamily(), cf));
- assertTrue(Bytes.equals(kv.getQualifier(), q));
- assertTrue(Bytes.equals(kv.getValue(), value));
+ assertTrue(Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), row, 0,
+ row.length));
+ assertTrue(Bytes.equals(kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength(), cf, 0,
+ cf.length));
+ assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+ kv.getQualifierLength(), q, 0, q.length));
+ assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value, 0,
+ value.length));
List<Tag> tags = kv.getTags();
assertNotNull(tags);
assertEquals(2, tags.size());
@@ -492,7 +498,7 @@ public class TestKeyValue extends TestCase {
Bytes.equals(next.getValue(), metaValue2);
assertFalse(tagItr.hasNext());
}
-
+
public void testMetaKeyComparator() {
CellComparator c = CellComparator.META_COMPARATOR;
long now = System.currentTimeMillis();
@@ -500,23 +506,23 @@ public class TestKeyValue extends TestCase {
KeyValue a = new KeyValue(Bytes.toBytes("table1"), now);
KeyValue b = new KeyValue(Bytes.toBytes("table2"), now);
assertTrue(c.compare(a, b) < 0);
-
+
a = new KeyValue(Bytes.toBytes("table1,111"), now);
b = new KeyValue(Bytes.toBytes("table2"), now);
assertTrue(c.compare(a, b) < 0);
-
+
a = new KeyValue(Bytes.toBytes("table1"), now);
b = new KeyValue(Bytes.toBytes("table2,111"), now);
assertTrue(c.compare(a, b) < 0);
-
+
a = new KeyValue(Bytes.toBytes("table,111"), now);
b = new KeyValue(Bytes.toBytes("table,2222"), now);
assertTrue(c.compare(a, b) < 0);
-
+
a = new KeyValue(Bytes.toBytes("table,111,aaaa"), now);
b = new KeyValue(Bytes.toBytes("table,2222"), now);
assertTrue(c.compare(a, b) < 0);
-
+
a = new KeyValue(Bytes.toBytes("table,111"), now);
b = new KeyValue(Bytes.toBytes("table,2222.bbb"), now);
assertTrue(c.compare(a, b) < 0);
@@ -524,7 +530,7 @@ public class TestKeyValue extends TestCase {
a = new KeyValue(Bytes.toBytes("table,,aaaa"), now);
b = new KeyValue(Bytes.toBytes("table,111,bbb"), now);
assertTrue(c.compare(a, b) < 0);
-
+
a = new KeyValue(Bytes.toBytes("table,111,aaaa"), now);
b = new KeyValue(Bytes.toBytes("table,111,bbb"), now);
assertTrue(c.compare(a, b) < 0);
@@ -532,7 +538,7 @@ public class TestKeyValue extends TestCase {
a = new KeyValue(Bytes.toBytes("table,111,xxxx"), now);
b = new KeyValue(Bytes.toBytes("table,111,222,bbb"), now);
assertTrue(c.compare(a, b) < 0);
-
+
a = new KeyValue(Bytes.toBytes("table,111,11,xxx"), now);
b = new KeyValue(Bytes.toBytes("table,111,222,bbb"), now);
assertTrue(c.compare(a, b) < 0);
@@ -590,12 +596,6 @@ public class TestKeyValue extends TestCase {
return this.kv.getTagsOffset();
}
- // used to achieve atomic operations in the memstore.
- @Override
- public long getMvccVersion() {
- return this.kv.getMvccVersion();
- }
-
/**
* used to achieve atomic operations in the memstore.
*/
@@ -613,7 +613,7 @@ public class TestKeyValue extends TestCase {
}
/**
- *
+ *
* @return Timestamp
*/
@Override
@@ -729,34 +729,6 @@ public class TestKeyValue extends TestCase {
return this.kv.getQualifierLength();
}
- @Override
- @Deprecated
- public byte[] getValue() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- @Deprecated
- public byte[] getFamily() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- @Deprecated
- public byte[] getQualifier() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- @Deprecated
- public byte[] getRow() {
- // TODO Auto-generated method stub
- return null;
- }
-
/**
* @return the backing array of the entire KeyValue (all KeyValue fields are
* in a single array)
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
index a6b7cc52cb1..bd2a29d35d4 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java
@@ -40,7 +40,7 @@ public class TestByteRangeWithKVSerialization {
pbr.put((byte) (tagsLen >> 8 & 0xff));
pbr.put((byte) (tagsLen & 0xff));
pbr.put(kv.getTagsArray(), kv.getTagsOffset(), tagsLen);
- pbr.putVLong(kv.getMvccVersion());
+ pbr.putVLong(kv.getSequenceId());
}
static KeyValue readCell(PositionedByteRange pbr) throws Exception {
@@ -88,7 +88,7 @@ public class TestByteRangeWithKVSerialization {
Assert.assertTrue(Bytes.equals(kv.getTagsArray(), kv.getTagsOffset(),
kv.getTagsLength(), kv1.getTagsArray(), kv1.getTagsOffset(),
kv1.getTagsLength()));
- Assert.assertEquals(kv1.getMvccVersion(), kv.getMvccVersion());
+ Assert.assertEquals(kv1.getSequenceId(), kv.getSequenceId());
}
}
}
\ No newline at end of file
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
index a49d9cad292..adc0eb7581a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionLocation;
@@ -74,24 +75,24 @@ import org.junit.experimental.categories.Category;
/**
* IT test used to verify the deletes with visibility labels.
- * The test creates three tables tablename_0, tablename_1 and tablename_2 and each table
+ * The test creates three tables tablename_0, tablename_1 and tablename_2 and each table
* is associated with a unique pair of labels.
- * Another common table with the name 'commontable' is created and it has the data combined
- * from all these 3 tables such that there are 3 versions of every row but the visibility label
- * in every row corresponds to the table from which the row originated.
- * Then deletes are issued to the common table by selecting the visibility label
- * associated with each of the smaller tables.
- * After the delete is issued with one set of visibility labels we try to scan the common table
- * with each of the visibility pairs defined for the 3 tables.
- * So after the first delete is issued, a scan with the first set of visibility labels would
- * return zero result whereas the scan issued with the other two sets of visibility labels
- * should return all the rows corresponding to that set of visibility labels. The above
- * process of delete and scan is repeated until after the last set of visibility labels are
+ * Another common table with the name 'commontable' is created and it has the data combined
+ * from all these 3 tables such that there are 3 versions of every row but the visibility label
+ * in every row corresponds to the table from which the row originated.
+ * Then deletes are issued to the common table by selecting the visibility label
+ * associated with each of the smaller tables.
+ * After the delete is issued with one set of visibility labels we try to scan the common table
+ * with each of the visibility pairs defined for the 3 tables.
+ * So after the first delete is issued, a scan with the first set of visibility labels would
+ * return zero results whereas the scan issued with the other two sets of visibility labels
+ * should return all the rows corresponding to that set of visibility labels. The above
+ * process of delete and scan is repeated until after the last set of visibility labels are
* used for the deletes the common table should not return any row.
- *
- * To use this
+ *
+ * To use this
* ./hbase org.apache.hadoop.hbase.test.IntegrationTestBigLinkedListWithVisibility Loop 1 1 20000 /tmp 1 10000
- * or
+ * or
* ./hbase org.apache.hadoop.hbase.IntegrationTestsDriver -r .*IntegrationTestBigLinkedListWithVisibility.*
*/
@Category(IntegrationTests.class)
@@ -211,7 +212,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
for (int j = 0; j < DEFAULT_TABLES_COUNT; j++) {
Put put = new Put(current[i]);
put.add(FAMILY_NAME, COLUMN_PREV, prev == null ? NO_KEY : prev[i]);
-
+
if (count >= 0) {
put.add(FAMILY_NAME, COLUMN_COUNT, Bytes.toBytes(count + i));
}
@@ -331,7 +332,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
@Override
protected void processKV(ImmutableBytesWritable key, Result result,
org.apache.hadoop.mapreduce.Mapper.Context context, Put put,
- org.apache.hadoop.hbase.client.Delete delete) throws
+ org.apache.hadoop.hbase.client.Delete delete) throws
IOException, InterruptedException {
String visibilityExps = split[index * 2] + OR + split[(index * 2) + 1];
for (Cell kv : result.rawCells()) {
@@ -343,7 +344,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
delete = new Delete(key.get());
}
delete.setCellVisibility(new CellVisibility(visibilityExps));
- delete.deleteFamily(kv.getFamily());
+ delete.deleteFamily(CellUtil.cloneFamily(kv));
}
if (delete != null) {
context.write(key, delete);
@@ -356,14 +357,14 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
super.addOptions();
addOptWithArg("u", USER_OPT, "User name");
}
-
+
@Override
protected void processOptions(CommandLine cmd) {
super.processOptions(cmd);
if (cmd.hasOption(USER_OPT)) {
userName = cmd.getOptionValue(USER_OPT);
}
-
+
}
@Override
public void setUpCluster() throws Exception {
@@ -561,7 +562,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
}
}
- private void verify(int numReducers, long expectedNumNodes,
+ private void verify(int numReducers, long expectedNumNodes,
Path iterationOutput, Verify verify) throws Exception {
verify.setConf(getConf());
int retCode = verify.run(iterationOutput, numReducers);
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
index 7fceaa5465a..d903d790219 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java
@@ -102,7 +102,7 @@ public class PrefixTreeCodec implements DataBlockEncoder {
ByteBufferUtils.skip(result, keyValueLength);
offset += keyValueLength;
if (includesMvcc) {
- ByteBufferUtils.writeVLong(result, currentCell.getMvccVersion());
+ ByteBufferUtils.writeVLong(result, currentCell.getSequenceId());
}
}
result.position(result.limit());//make it appear as if we were appending
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java
index 8a45f1312f4..eefd953c36b 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java
@@ -314,12 +314,6 @@ public class PrefixTreeSeeker implements EncodedSeeker {
return type;
}
- @Override
- @Deprecated
- public long getMvccVersion() {
- return getSequenceId();
- }
-
@Override
public long getSequenceId() {
return seqId;
@@ -355,30 +349,6 @@ public class PrefixTreeSeeker implements EncodedSeeker {
return this.tagsLength;
}
- @Override
- @Deprecated
- public byte[] getValue() {
- return this.val;
- }
-
- @Override
- @Deprecated
- public byte[] getFamily() {
- return this.fam;
- }
-
- @Override
- @Deprecated
- public byte[] getQualifier() {
- return this.qual;
- }
-
- @Override
- @Deprecated
- public byte[] getRow() {
- return this.row;
- }
-
@Override
public String toString() {
String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());
diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
index f06634c9c38..82d1d7eef6c 100644
--- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
+++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java
@@ -131,18 +131,13 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell>
List<KeyValue> d = generator.generateTestKeyValues(numColumns);
for (KeyValue col : d) {
- ByteRange colRange = new SimpleMutableByteRange(col.getQualifier());
+ ByteRange colRange = new SimpleMutableByteRange(CellUtil.cloneQualifier(col));
inputs.add(colRange);
sortedColumns.add(colRange);
}
diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java
index ec115511584..0c8caf4dce7 100644
--- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java
+++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java
@@ -181,7 +181,7 @@ public class TestRowEncoder {
// assert keys are equal (doesn't compare values)
Assert.assertEquals(expected, actual);
if (includeMemstoreTS) {
- Assert.assertEquals(expected.getMvccVersion(), actual.getMvccVersion());
+ Assert.assertEquals(expected.getSequenceId(), actual.getSequenceId());
}
// assert values equal
Assert.assertTrue(Bytes.equals(expected.getValueArray(), expected.getValueOffset(),
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
index a12c747f18b..f85d745b8ed 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
@@ -115,7 +115,8 @@ public class RemoteHTable implements Table {
if (o instanceof byte[]) {
sb.append(Bytes.toStringBinary((byte[])o));
} else if (o instanceof KeyValue) {
- sb.append(Bytes.toStringBinary(((KeyValue)o).getQualifier()));
+ sb.append(Bytes.toStringBinary(((KeyValue) o).getQualifierArray(),
+ ((KeyValue) o).getQualifierOffset(), ((KeyValue) o).getQualifierLength()));
} else {
throw new RuntimeException("object type not handled");
}
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
index 3acddc1aa30..22ee31d39dd 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
@@ -19,6 +19,10 @@
package org.apache.hadoop.hbase.rest;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
import java.io.ByteArrayInputStream;
import java.io.StringWriter;
import java.util.ArrayList;
@@ -32,16 +36,24 @@ import javax.xml.bind.Unmarshaller;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryComparator;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
import org.apache.hadoop.hbase.filter.PageFilter;
@@ -52,8 +64,6 @@ import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SkipFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.Response;
@@ -64,9 +74,6 @@ import org.apache.hadoop.hbase.rest.model.ScannerModel;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RestTests;
import org.apache.hadoop.hbase.util.Bytes;
-
-import static org.junit.Assert.*;
-
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -108,7 +115,7 @@ public class TestScannersWithFilters {
};
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- private static final HBaseRESTTestingUtility REST_TEST_UTIL =
+ private static final HBaseRESTTestingUtility REST_TEST_UTIL =
new HBaseRESTTestingUtility();
private static Client client;
private static JAXBContext context;
@@ -128,7 +135,7 @@ public class TestScannersWithFilters {
ScannerModel.class);
marshaller = context.createMarshaller();
unmarshaller = context.createUnmarshaller();
- client = new Client(new Cluster().add("localhost",
+ client = new Client(new Cluster().add("localhost",
REST_TEST_UTIL.getServletPort()));
Admin admin = TEST_UTIL.getHBaseAdmin();
if (!admin.tableExists(TABLE)) {
@@ -154,7 +161,7 @@ public class TestScannersWithFilters {
}
table.put(p);
}
-
+
// Insert second half (reverse families)
for(byte [] ROW : ROWS_ONE) {
Put p = new Put(ROW);
@@ -172,14 +179,14 @@ public class TestScannersWithFilters {
}
table.put(p);
}
-
+
// Delete the second qualifier from all rows and families
for(byte [] ROW : ROWS_ONE) {
Delete d = new Delete(ROW);
d.deleteColumns(FAMILIES[0], QUALIFIERS_ONE[1]);
d.deleteColumns(FAMILIES[1], QUALIFIERS_ONE[1]);
table.delete(d);
- }
+ }
for(byte [] ROW : ROWS_TWO) {
Delete d = new Delete(ROW);
d.deleteColumns(FAMILIES[0], QUALIFIERS_TWO[1]);
@@ -187,7 +194,7 @@ public class TestScannersWithFilters {
table.delete(d);
}
colsPerRow -= 2;
-
+
// Delete the second rows from both groups, one column at a time
for(byte [] QUALIFIER : QUALIFIERS_ONE) {
Delete d = new Delete(ROWS_ONE[1]);
@@ -212,7 +219,7 @@ public class TestScannersWithFilters {
TEST_UTIL.shutdownMiniCluster();
}
- private static void verifyScan(Scan s, long expectedRows, long expectedKeys)
+ private static void verifyScan(Scan s, long expectedRows, long expectedKeys)
throws Exception {
ScannerModel model = ScannerModel.fromScan(s);
model.setBatch(Integer.MAX_VALUE); // fetch it all at once
@@ -234,7 +241,7 @@ public class TestScannersWithFilters {
unmarshaller.unmarshal(new ByteArrayInputStream(response.getBody()));
int rows = cells.getRows().size();
- assertTrue("Scanned too many rows! Only expected " + expectedRows +
+ assertTrue("Scanned too many rows! Only expected " + expectedRows +
" total but scanned " + rows, expectedRows == rows);
for (RowModel row: cells.getRows()) {
int count = row.getCells().size();
@@ -247,7 +254,7 @@ public class TestScannersWithFilters {
assertEquals(response.getCode(), 200);
}
- private static void verifyScanFull(Scan s, KeyValue [] kvs)
+ private static void verifyScanFull(Scan s, KeyValue [] kvs)
throws Exception {
ScannerModel model = ScannerModel.fromScan(s);
model.setBatch(Integer.MAX_VALUE); // fetch it all at once
@@ -281,19 +288,19 @@ public class TestScannersWithFilters {
RowModel rowModel = i.next();
List<CellModel> cells = rowModel.getCells();
if (cells.isEmpty()) break;
- assertTrue("Scanned too many keys! Only expected " + kvs.length +
- " total but already scanned " + (cells.size() + idx),
+ assertTrue("Scanned too many keys! Only expected " + kvs.length +
+ " total but already scanned " + (cells.size() + idx),
kvs.length >= idx + cells.size());
for (CellModel cell: cells) {
- assertTrue("Row mismatch",
- Bytes.equals(rowModel.getKey(), kvs[idx].getRow()));
+ assertTrue("Row mismatch",
+ Bytes.equals(rowModel.getKey(), CellUtil.cloneRow(kvs[idx])));
byte[][] split = KeyValue.parseColumn(cell.getColumn());
- assertTrue("Family mismatch",
- Bytes.equals(split[0], kvs[idx].getFamily()));
- assertTrue("Qualifier mismatch",
- Bytes.equals(split[1], kvs[idx].getQualifier()));
- assertTrue("Value mismatch",
- Bytes.equals(cell.getValue(), kvs[idx].getValue()));
+ assertTrue("Family mismatch",
+ Bytes.equals(split[0], CellUtil.cloneFamily(kvs[idx])));
+ assertTrue("Qualifier mismatch",
+ Bytes.equals(split[1], CellUtil.cloneQualifier(kvs[idx])));
+ assertTrue("Value mismatch",
+ Bytes.equals(cell.getValue(), CellUtil.cloneValue(kvs[idx])));
idx++;
}
}
@@ -309,7 +316,7 @@ public class TestScannersWithFilters {
marshaller.marshal(model, writer);
LOG.debug(writer.toString());
byte[] body = Bytes.toBytes(writer.toString());
- Response response = client.put("/" + TABLE + "/scanner",
+ Response response = client.put("/" + TABLE + "/scanner",
Constants.MIMETYPE_XML, body);
assertEquals(response.getCode(), 201);
String scannerURI = response.getLocation();
@@ -334,7 +341,7 @@ public class TestScannersWithFilters {
RowModel rowModel = i.next();
List<CellModel> cells = rowModel.getCells();
if (cells.isEmpty()) break;
- assertTrue("Scanned too many rows! Only expected " + expectedRows +
+ assertTrue("Scanned too many rows! Only expected " + expectedRows +
" total but already scanned " + (j+1), expectedRows > j);
assertEquals("Expected " + expectedKeys + " keys per row but " +
"returned " + cells.size(), expectedKeys, cells.size());
@@ -348,7 +355,7 @@ public class TestScannersWithFilters {
// No filter
long expectedRows = numRows;
long expectedKeys = colsPerRow;
-
+
// Both families
Scan s = new Scan();
verifyScan(s, expectedRows, expectedKeys);
@@ -416,7 +423,7 @@ public class TestScannersWithFilters {
new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1])
};
-
+
// Grab all 6 rows
long expectedRows = 6;
long expectedKeys = colsPerRow;
@@ -425,7 +432,7 @@ public class TestScannersWithFilters {
verifyScan(s, expectedRows, expectedKeys);
s.setFilter(new PageFilter(expectedRows));
verifyScanFull(s, expectedKVs);
-
+
// Grab first 4 rows (6 cols per row)
expectedRows = 4;
expectedKeys = colsPerRow;
@@ -434,7 +441,7 @@ public class TestScannersWithFilters {
verifyScan(s, expectedRows, expectedKeys);
s.setFilter(new PageFilter(expectedRows));
verifyScanFull(s, Arrays.copyOf(expectedKVs, 24));
-
+
// Grab first 2 rows
expectedRows = 2;
expectedKeys = colsPerRow;
@@ -451,20 +458,20 @@ public class TestScannersWithFilters {
s.setFilter(new PageFilter(expectedRows));
verifyScan(s, expectedRows, expectedKeys);
s.setFilter(new PageFilter(expectedRows));
- verifyScanFull(s, Arrays.copyOf(expectedKVs, 6));
+ verifyScanFull(s, Arrays.copyOf(expectedKVs, 6));
}
@Test
public void testInclusiveStopFilter() throws Exception {
// Grab rows from group one
-
+
// If we just use start/stop row, we get total/2 - 1 rows
long expectedRows = (numRows / 2) - 1;
long expectedKeys = colsPerRow;
- Scan s = new Scan(Bytes.toBytes("testRowOne-0"),
+ Scan s = new Scan(Bytes.toBytes("testRowOne-0"),
Bytes.toBytes("testRowOne-3"));
verifyScan(s, expectedRows, expectedKeys);
-
+
// Now use start row with inclusive stop filter
expectedRows = numRows / 2;
s = new Scan(Bytes.toBytes("testRowOne-0"));
@@ -472,14 +479,14 @@ public class TestScannersWithFilters {
verifyScan(s, expectedRows, expectedKeys);
// Grab rows from group two
-
+
// If we just use start/stop row, we get total/2 - 1 rows
expectedRows = (numRows / 2) - 1;
expectedKeys = colsPerRow;
- s = new Scan(Bytes.toBytes("testRowTwo-0"),
+ s = new Scan(Bytes.toBytes("testRowTwo-0"),
Bytes.toBytes("testRowTwo-3"));
verifyScan(s, expectedRows, expectedKeys);
-
+
// Now use start row with inclusive stop filter
expectedRows = numRows / 2;
s = new Scan(Bytes.toBytes("testRowTwo-0"));
@@ -497,7 +504,7 @@ public class TestScannersWithFilters {
Scan s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match keys less than same qualifier
// Expect only two keys (one from each family) in half the rows
expectedRows = numRows / 2;
@@ -507,7 +514,7 @@ public class TestScannersWithFilters {
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match keys less than or equal
// Expect four keys (two from each family) in half the rows
expectedRows = numRows / 2;
@@ -517,7 +524,7 @@ public class TestScannersWithFilters {
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match keys not equal
// Expect four keys (two from each family)
// Only look in first group of rows
@@ -528,7 +535,7 @@ public class TestScannersWithFilters {
s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match keys greater or equal
// Expect four keys (two from each family)
// Only look in first group of rows
@@ -539,7 +546,7 @@ public class TestScannersWithFilters {
s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match keys greater
// Expect two keys (one from each family)
// Only look in first group of rows
@@ -550,7 +557,7 @@ public class TestScannersWithFilters {
s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match keys not equal to
// Look across rows and fully validate the keys and ordering
// Expect varied numbers of keys, 4 per row in group one, 6 per row in
@@ -559,7 +566,7 @@ public class TestScannersWithFilters {
new BinaryComparator(QUALIFIERS_ONE[2]));
s = new Scan();
s.setFilter(f);
-
+
KeyValue [] kvs = {
// testRowOne-0
new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
@@ -599,7 +606,7 @@ public class TestScannersWithFilters {
new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
};
verifyScanFull(s, kvs);
-
+
// Test across rows and groups with a regex
// Filter out "test*-2"
// Expect 4 keys per row across both groups
@@ -607,7 +614,7 @@ public class TestScannersWithFilters {
new RegexStringComparator("test.+-2"));
s = new Scan();
s.setFilter(f);
-
+
kvs = new KeyValue [] {
// testRowOne-0
new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
@@ -653,7 +660,7 @@ public class TestScannersWithFilters {
Scan s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match a two rows, one from each group, using regex
expectedRows = 2;
expectedKeys = colsPerRow;
@@ -662,7 +669,7 @@ public class TestScannersWithFilters {
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match rows less than
// Expect all keys in one row
expectedRows = 1;
@@ -672,7 +679,7 @@ public class TestScannersWithFilters {
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match rows less than or equal
// Expect all keys in two rows
expectedRows = 2;
@@ -682,7 +689,7 @@ public class TestScannersWithFilters {
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match rows not equal
// Expect all keys in all but one row
expectedRows = numRows - 1;
@@ -692,7 +699,7 @@ public class TestScannersWithFilters {
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match keys greater or equal
// Expect all keys in all but one row
expectedRows = numRows - 1;
@@ -702,7 +709,7 @@ public class TestScannersWithFilters {
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match keys greater
// Expect all keys in all but two rows
expectedRows = numRows - 2;
@@ -712,7 +719,7 @@ public class TestScannersWithFilters {
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match rows not equal to testRowTwo-2
// Look across rows and fully validate the keys and ordering
// Should see all keys in all rows but testRowTwo-2
@@ -720,7 +727,7 @@ public class TestScannersWithFilters {
new BinaryComparator(Bytes.toBytes("testRowOne-2")));
s = new Scan();
s.setFilter(f);
-
+
KeyValue [] kvs = {
// testRowOne-0
new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
@@ -759,7 +766,7 @@ public class TestScannersWithFilters {
new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
};
verifyScanFull(s, kvs);
-
+
// Test across rows and groups with a regex
// Filter out everything that doesn't match "*-2"
// Expect all keys in two rows
@@ -767,7 +774,7 @@ public class TestScannersWithFilters {
new RegexStringComparator(".+-2"));
s = new Scan();
s.setFilter(f);
-
+
kvs = new KeyValue [] {
// testRowOne-2
new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
@@ -825,7 +832,7 @@ public class TestScannersWithFilters {
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match values less than or equal
// Expect all rows
expectedRows = numRows;
@@ -845,7 +852,7 @@ public class TestScannersWithFilters {
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match values not equal
// Expect half the rows
expectedRows = numRows / 2;
@@ -855,7 +862,7 @@ public class TestScannersWithFilters {
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match values greater or equal
// Expect all rows
expectedRows = numRows;
@@ -865,7 +872,7 @@ public class TestScannersWithFilters {
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match values greater
// Expect half the rows
expectedRows = numRows / 2;
@@ -875,7 +882,7 @@ public class TestScannersWithFilters {
s = new Scan();
s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
-
+
// Match values not equal to testValueOne
// Look across rows and fully validate the keys and ordering
// Should see all keys in all group two rows
@@ -883,7 +890,7 @@ public class TestScannersWithFilters {
new BinaryComparator(Bytes.toBytes("testValueOne")));
s = new Scan();
s.setFilter(f);
-
+
KeyValue [] kvs = {
// testRowTwo-0
new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
@@ -918,7 +925,7 @@ public class TestScannersWithFilters {
new BinaryComparator(Bytes.toBytes("testQualifierOne-2"))));
Scan s = new Scan();
s.setFilter(f);
-
+
KeyValue [] kvs = {
// testRowTwo-0
new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
@@ -947,7 +954,7 @@ public class TestScannersWithFilters {
@Test
public void testFilterList() throws Exception {
- // Test getting a single row, single key using Row, Qualifier, and Value
+ // Test getting a single row, single key using Row, Qualifier, and Value
// regular expression and substring filters
// Use must pass all
List<Filter> filters = new ArrayList<Filter>();
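
For reference, the scan-plus-filter shape these test hunks keep setting up, as a minimal self-contained sketch (the class and method names are illustrative, not part of the patch):

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    import org.apache.hadoop.hbase.filter.RegexStringComparator;
    import org.apache.hadoop.hbase.filter.RowFilter;

    public class RegexRowScanSketch {
      // Keep only rows whose key matches "test.+-2", as the test above does.
      static Scan buildScan() {
        Scan s = new Scan();
        s.setFilter(new RowFilter(CompareOp.EQUAL, new RegexStringComparator("test.+-2")));
        return s;
      }
    }
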
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java
index eb6d3bef82f..3ada51a67d8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java
@@ -106,12 +106,6 @@ public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimesta
return cell.getTypeByte();
}
- @Override
- @Deprecated
- public long getMvccVersion() {
- return getSequenceId();
- }
-
@Override
public long getSequenceId() {
return cell.getSequenceId();
@@ -151,30 +145,6 @@ public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimesta
return this.tags.length;
}
- @Override
- @Deprecated
- public byte[] getValue() {
- return cell.getValue();
- }
-
- @Override
- @Deprecated
- public byte[] getFamily() {
- return cell.getFamily();
- }
-
- @Override
- @Deprecated
- public byte[] getQualifier() {
- return cell.getQualifier();
- }
-
- @Override
- @Deprecated
- public byte[] getRow() {
- return cell.getRow();
- }
-
@Override
public long heapSize() {
long sum = CellUtil.estimatedHeapSizeOf(cell) - cell.getTagsLength();
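
With the copying accessors gone from the interface, a delegating Cell such as TagRewriteCell only forwards the array/offset/length getters; a caller that needs a detached byte[] now says so explicitly through CellUtil. A minimal sketch of that caller-side pattern (the class name is invented for illustration):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;

    public class CellCopySketch {
      // Explicit, visible copies replace the old kv.getRow()/getValue() style.
      static byte[][] copyAll(Cell cell) {
        return new byte[][] {
            CellUtil.cloneRow(cell),       // was cell.getRow()
            CellUtil.cloneFamily(cell),    // was cell.getFamily()
            CellUtil.cloneQualifier(cell), // was cell.getQualifier()
            CellUtil.cloneValue(cell)      // was cell.getValue()
        };
      }
    }
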
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
index c067fc3fa2e..9d9cee0e8a8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
@@ -17,6 +17,12 @@
*/
package org.apache.hadoop.hbase.mapreduce;
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Map;
+import java.util.TreeMap;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -48,12 +54,6 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import java.io.IOException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Map;
-import java.util.TreeMap;
-
/**
* A tool to replay WAL files as a M/R job.
* The WAL can be replayed for a set of tables or all tables,
@@ -106,8 +106,8 @@ public class WALPlayer extends Configured implements Tool {
if (Bytes.equals(table, key.getTablename().getName())) {
for (Cell cell : value.getCells()) {
KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
- if (WALEdit.isMetaEditFamily(kv.getFamily())) continue;
- context.write(new ImmutableBytesWritable(kv.getRow()), kv);
+ if (WALEdit.isMetaEditFamily(kv)) continue;
+ context.write(new ImmutableBytesWritable(CellUtil.cloneRow(kv)), kv);
}
}
} catch (InterruptedException e) {
@@ -149,7 +149,7 @@ public class WALPlayer extends Configured implements Tool {
Cell lastCell = null;
for (Cell cell : value.getCells()) {
// filtering WAL meta entries
- if (WALEdit.isMetaEditFamily(cell.getFamily())) continue;
+ if (WALEdit.isMetaEditFamily(cell)) continue;
// Allow a subclass filter out this cell.
if (filter(context, cell)) {
@@ -163,9 +163,9 @@ public class WALPlayer extends Configured implements Tool {
if (put != null) context.write(tableOut, put);
if (del != null) context.write(tableOut, del);
if (CellUtil.isDelete(cell)) {
- del = new Delete(cell.getRow());
+ del = new Delete(CellUtil.cloneRow(cell));
} else {
- put = new Put(cell.getRow());
+ put = new Put(CellUtil.cloneRow(cell));
}
}
if (CellUtil.isDelete(cell)) {
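
The WALPlayer change above is the canonical replacement pattern: seed each new mutation with a detached copy of the cell's row. A minimal sketch, using only the client API visible in the hunk (the helper name is invented):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Mutation;
    import org.apache.hadoop.hbase.client.Put;

    public class WalCellReplaySketch {
      // A Delete or Put is seeded with a detached copy of the cell's row,
      // since Cell no longer hands out byte[]s itself.
      static Mutation startMutation(Cell cell) {
        byte[] row = CellUtil.cloneRow(cell);
        return CellUtil.isDelete(cell) ? new Delete(row) : new Put(row);
      }
    }
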
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
index d6dfb76dc45..02a73f8c91a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
@@ -329,7 +329,7 @@ public class ScanQueryMatcher {
* they affect
*/
byte typeByte = cell.getTypeByte();
- long mvccVersion = cell.getMvccVersion();
+ long mvccVersion = cell.getSequenceId();
if (CellUtil.isDelete(cell)) {
if (keepDeletedCells == KeepDeletedCells.FALSE
|| (keepDeletedCells == KeepDeletedCells.TTL && timestamp < ttl)) {
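
getMvccVersion() was already defined as an alias for getSequenceId(), so the swap here is purely mechanical; the value still orders writes within a region. A tiny illustrative sketch (helper name invented):

    import org.apache.hadoop.hbase.Cell;

    public class SequenceIdSketch {
      // The mvcc version of a cell is now read as its sequence id;
      // a higher value means the write happened later in the region.
      static boolean isNewer(Cell a, Cell b) {
        return a.getSequenceId() > b.getSequenceId();
      }
    }
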
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
index 45610fa8a7e..bb49aba56c5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
@@ -34,6 +34,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KeyOnlyKeyValue;
@@ -168,7 +169,7 @@ public class StripeStoreFileManager
// Order matters for this call.
result.addSublist(state.level0Files);
if (!state.stripeFiles.isEmpty()) {
- int lastStripeIndex = findStripeForRow(targetKey.getRow(), false);
+ int lastStripeIndex = findStripeForRow(CellUtil.cloneRow(targetKey), false);
for (int stripeIndex = lastStripeIndex; stripeIndex >= 0; --stripeIndex) {
result.addSublist(state.stripeFiles.get(stripeIndex));
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
index f6619e8e0f5..a752ff19517 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
@@ -288,7 +288,7 @@ public class WALEdit implements Writable, HeapSize {
public static FlushDescriptor getFlushDescriptor(Cell cell) throws IOException {
if (CellUtil.matchingColumn(cell, METAFAMILY, FLUSH)) {
- return FlushDescriptor.parseFrom(cell.getValue());
+ return FlushDescriptor.parseFrom(CellUtil.cloneValue(cell));
}
return null;
}
@@ -302,7 +302,7 @@ public class WALEdit implements Writable, HeapSize {
public static RegionEventDescriptor getRegionEventDescriptor(Cell cell) throws IOException {
if (CellUtil.matchingColumn(cell, METAFAMILY, REGION_EVENT)) {
- return RegionEventDescriptor.parseFrom(cell.getValue());
+ return RegionEventDescriptor.parseFrom(CellUtil.cloneValue(cell));
}
return null;
}
@@ -336,7 +336,7 @@ public class WALEdit implements Writable, HeapSize {
*/
public static CompactionDescriptor getCompaction(Cell kv) throws IOException {
if (CellUtil.matchingColumn(kv, METAFAMILY, COMPACTION)) {
- return CompactionDescriptor.parseFrom(kv.getValue());
+ return CompactionDescriptor.parseFrom(CellUtil.cloneValue(kv));
}
return null;
}
@@ -365,7 +365,7 @@ public class WALEdit implements Writable, HeapSize {
*/
public static WALProtos.BulkLoadDescriptor getBulkLoadDescriptor(Cell cell) throws IOException {
if (CellUtil.matchingColumn(cell, METAFAMILY, BULK_LOAD)) {
- return WALProtos.BulkLoadDescriptor.parseFrom(cell.getValue());
+ return WALProtos.BulkLoadDescriptor.parseFrom(CellUtil.cloneValue(cell));
}
return null;
}
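
All four descriptor getters above follow one shape: copy the value out of the cell, then hand the detached bytes to protobuf. A condensed sketch of that shape (assuming the WALProtos generated classes live in org.apache.hadoop.hbase.protobuf.generated, as elsewhere in this patch):

    import java.io.IOException;

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;

    public class WalMetaValueSketch {
      // Copy the cell value once, then parse the detached byte[].
      static CompactionDescriptor parse(Cell cell) throws IOException {
        return CompactionDescriptor.parseFrom(CellUtil.cloneValue(cell));
      }
    }
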
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java
index 166dc37210b..3501f3e70ba 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java
@@ -23,6 +23,7 @@ import java.util.NavigableMap;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.wal.WAL.Entry;
@@ -44,8 +45,8 @@ public class ScopeWALEntryFilter implements WALEntryFilter {
Cell cell = cells.get(i);
// The scope will be null or empty if
// there's nothing to replicate in that WALEdit
- if (!scopes.containsKey(cell.getFamily())
- || scopes.get(cell.getFamily()) == HConstants.REPLICATION_SCOPE_LOCAL) {
+ byte[] fam = CellUtil.cloneFamily(cell);
+ if (!scopes.containsKey(fam) || scopes.get(fam) == HConstants.REPLICATION_SCOPE_LOCAL) {
cells.remove(i);
}
}
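
Hoisting the clone into a local is worth noting: the old code called getFamily() twice per cell, copying the family twice, while the new code copies once and reuses it for both map operations. A minimal sketch of the check (assuming a scopes map ordered by Bytes.BYTES_COMPARATOR, as in the class above):

    import java.util.NavigableMap;

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HConstants;

    public class ScopeCheckSketch {
      // One family copy per cell, reused for both lookups.
      static boolean isLocalScope(NavigableMap<byte[], Integer> scopes, Cell cell) {
        byte[] fam = CellUtil.cloneFamily(cell);
        return !scopes.containsKey(fam)
            || scopes.get(fam) == HConstants.REPLICATION_SCOPE_LOCAL;
      }
    }
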
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java
index b8925125423..0cbbcef0496 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java
@@ -25,9 +25,10 @@ import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.wal.WAL.Entry;
public class TableCfWALEntryFilter implements WALEntryFilter {
@@ -62,7 +63,7 @@ public class TableCfWALEntryFilter implements WALEntryFilter {
Cell cell = cells.get(i);
// Ignore (remove) the kv if its cf isn't in the replicable cf list
// (empty cfs means all cfs of this table are replicable)
- if ((cfs != null && !cfs.contains(Bytes.toString(cell.getFamily())))) {
+ if ((cfs != null && !cfs.contains(Bytes.toString(CellUtil.cloneFamily(cell))))) {
cells.remove(i);
}
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
index c75f81f1b77..b3db0f616dd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HConstants;
@@ -354,7 +355,7 @@ public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint {
}
sinkWriter.append(buffer.getTableName(), buffer.getEncodedRegionName(),
- entries.get(0).getEdit().getCells().get(0).getRow(), entries);
+ CellUtil.cloneRow(entries.get(0).getEdit().getCells().get(0)), entries);
}
@Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
index 5b0f469a04a..b396dfcc270 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
@@ -244,7 +244,7 @@ public class Replication extends WALActionsListener.Base implements
new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
byte[] family;
for (Cell cell : logEdit.getCells()) {
- family = cell.getFamily();
+ family = CellUtil.cloneFamily(cell);
// This is expected and the KV should not be replicated
if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) continue;
// Unexpected, has a tendency to happen in unit tests
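
Note the contrast inside this same loop: the family is cloned once because it is kept as a map key, while the METAFAMILY check uses CellUtil.matchingFamily, which compares in place and allocates nothing. When bytes are only compared, the matching helpers are the cheaper tool; a sketch (class name illustrative):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.regionserver.wal.WALEdit;

    public class MetaFamilySkipSketch {
      // Compare family bytes in place; no byte[] is allocated.
      static boolean isMetaEdit(Cell cell) {
        return CellUtil.matchingFamily(cell, WALEdit.METAFAMILY);
      }
    }
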
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index 7e9299a9399..8bd69a29f47 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -329,9 +329,9 @@ public class AccessController extends BaseMasterAndRegionObserver
List<KeyValue> kvList = (List<KeyValue>)family.getValue();
for (KeyValue kv : kvList) {
if (!authManager.authorize(user, tableName, family.getKey(),
- kv.getQualifier(), permRequest)) {
- return AuthResult.deny(request, "Failed qualifier check", user,
- permRequest, tableName, makeFamilyMap(family.getKey(), kv.getQualifier()));
+ CellUtil.cloneQualifier(kv), permRequest)) {
+ return AuthResult.deny(request, "Failed qualifier check", user, permRequest,
+ tableName, makeFamilyMap(family.getKey(), CellUtil.cloneQualifier(kv)));
}
}
}
@@ -749,7 +749,7 @@ public class AccessController extends BaseMasterAndRegionObserver
}
}
} else if (entry.getValue() == null) {
- get.addFamily(col);
+ get.addFamily(col);
} else {
throw new RuntimeException("Unhandled collection type " +
entry.getValue().getClass().getName());
@@ -1308,7 +1308,7 @@ public class AccessController extends BaseMasterAndRegionObserver
@Override
public void preModifyNamespace(ObserverContext ctx,
NamespaceDescriptor ns) throws IOException {
- // We require only global permission so that
+ // We require only global permission so that
// a user with NS admin cannot alter namespace configurations, e.g. namespace quota
requireGlobalPermission("modifyNamespace", Action.ADMIN, ns.getName());
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 0e20903129f..0c3ff8390d5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -393,7 +393,7 @@ public class HBaseFsck extends Configured implements Closeable {
LOG.info("Failed to create lock file " + hbckLockFilePath.getName()
+ ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "
+ retryCounter.getMaxAttempts());
- LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),
+ LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),
ioe);
try {
exception = ioe;
@@ -880,7 +880,7 @@ public class HBaseFsck extends Configured implements Closeable {
hf = HFile.createReader(fs, hfile.getPath(), cacheConf, getConf());
hf.loadFileInfo();
Cell startKv = hf.getFirstKey();
- start = startKv.getRow();
+ start = CellUtil.cloneRow(startKv);
Cell endKv = hf.getLastKey();
end = CellUtil.cloneRow(endKv);
} catch (IOException ioe) {
@@ -2685,10 +2685,10 @@ public class HBaseFsck extends Configured implements Closeable {
}
regionsFromMeta = Ordering.natural().immutableSortedCopy(regions);
}
-
+
return regionsFromMeta;
}
-
+
private class IntegrityFixSuggester extends TableIntegrityErrorHandlerImpl {
ErrorReporter errors;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
index e579164d501..4c55cb336c4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java
@@ -33,8 +33,6 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -43,14 +41,14 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.codehaus.jackson.map.ObjectMapper;
-import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
-// imports for things that haven't moved yet.
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-
/**
* WALPrettyPrinter prints the contents of a given WAL with a variety of
* options affecting formatting and extent of content.
@@ -245,7 +243,7 @@ public class WALPrettyPrinter {
}
WAL.Reader log = WALFactory.createReader(fs, p, conf);
-
+
if (log instanceof ProtobufLogReader) {
List<String> writerClsNames = ((ProtobufLogReader) log).getWriterClsNames();
if (writerClsNames != null && writerClsNames.size() > 0) {
@@ -258,18 +256,18 @@ public class WALPrettyPrinter {
}
out.println();
}
-
+
String cellCodecClsName = ((ProtobufLogReader) log).getCodecClsName();
if (cellCodecClsName != null) {
out.println("Cell Codec Class: " + cellCodecClsName);
}
}
-
+
if (outputJSON && !persistentOutput) {
out.print("[");
firstTxn = true;
}
-
+
try {
WAL.Entry entry;
while ((entry = log.next()) != null) {
@@ -288,7 +286,7 @@ public class WALPrettyPrinter {
for (Cell cell : edit.getCells()) {
// add atomic operation to txn
Map<String, Object> op = new HashMap<String, Object>(toStringMap(cell));
- if (outputValues) op.put("value", Bytes.toStringBinary(cell.getValue()));
+ if (outputValues) op.put("value", Bytes.toStringBinary(CellUtil.cloneValue(cell)));
// check row output filter
if (row == null || ((String) op.get("row")).equals(row)) {
actions.add(op);
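
The patch uses cloneValue here, which allocates a copy per cell just to stringify it. Where the copy itself is not needed, toStringBinary has an offset/length overload that renders straight from the backing array; a sketch of that alternative (class and method names invented):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ValueToStringSketch {
      // Same output as Bytes.toStringBinary(CellUtil.cloneValue(cell)),
      // without the interim byte[] copy.
      static String valueAsString(Cell cell) {
        return Bytes.toStringBinary(cell.getValueArray(), cell.getValueOffset(),
            cell.getValueLength());
      }
    }
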
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 1b3fcf2844a..0dd8beacb96 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -3744,11 +3744,11 @@ public class TestFromClientSide {
// KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO
KeyValue kv = (KeyValue)put.getFamilyCellMap().get(CONTENTS_FAMILY).get(0);
- assertTrue(Bytes.equals(kv.getFamily(), CONTENTS_FAMILY));
+ assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), CONTENTS_FAMILY));
// will it return null or an empty byte array?
- assertTrue(Bytes.equals(kv.getQualifier(), new byte[0]));
+ assertTrue(Bytes.equals(CellUtil.cloneQualifier(kv), new byte[0]));
- assertTrue(Bytes.equals(kv.getValue(), value));
+ assertTrue(Bytes.equals(CellUtil.cloneValue(kv), value));
table.put(put);
@@ -5335,7 +5335,7 @@ public class TestFromClientSide {
assertEquals(1, regionsList.size());
}
}
-
+
private List<HRegionLocation> getRegionsInRange(TableName tableName, byte[] startKey,
byte[] endKey) throws IOException {
List<HRegionLocation> regionsInRange = new ArrayList<HRegionLocation>();
@@ -5778,8 +5778,11 @@ public class TestFromClientSide {
int expectedIndex = 5;
for (Result result : scanner) {
assertEquals(result.size(), 1);
- assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[expectedIndex]));
- assertTrue(Bytes.equals(result.rawCells()[0].getQualifier(), QUALIFIERS[expectedIndex]));
+ Cell c = result.rawCells()[0];
+ assertTrue(Bytes.equals(c.getRowArray(), c.getRowOffset(), c.getRowLength(),
+ ROWS[expectedIndex], 0, ROWS[expectedIndex].length));
+ assertTrue(Bytes.equals(c.getQualifierArray(), c.getQualifierOffset(),
+ c.getQualifierLength(), QUALIFIERS[expectedIndex], 0, QUALIFIERS[expectedIndex].length));
expectedIndex--;
}
assertEquals(expectedIndex, 0);
@@ -5817,7 +5820,7 @@ public class TestFromClientSide {
for (Result result : ht.getScanner(scan)) {
assertEquals(result.size(), 1);
assertEquals(result.rawCells()[0].getValueLength(), Bytes.SIZEOF_INT);
- assertEquals(Bytes.toInt(result.rawCells()[0].getValue()), VALUE.length);
+ assertEquals(Bytes.toInt(CellUtil.cloneValue(result.rawCells()[0])), VALUE.length);
count++;
}
assertEquals(count, 10);
@@ -6099,15 +6102,15 @@ public class TestFromClientSide {
result = scanner.next();
assertTrue("Expected 2 keys but received " + result.size(),
result.size() == 2);
- assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[4]));
- assertTrue(Bytes.equals(result.rawCells()[1].getRow(), ROWS[4]));
- assertTrue(Bytes.equals(result.rawCells()[0].getValue(), VALUES[1]));
- assertTrue(Bytes.equals(result.rawCells()[1].getValue(), VALUES[2]));
+ assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[4]));
+ assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[1]), ROWS[4]));
+ assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[0]), VALUES[1]));
+ assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[1]), VALUES[2]));
result = scanner.next();
assertTrue("Expected 1 key but received " + result.size(),
result.size() == 1);
- assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[3]));
- assertTrue(Bytes.equals(result.rawCells()[0].getValue(), VALUES[0]));
+ assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[3]));
+ assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[0]), VALUES[0]));
scanner.close();
ht.close();
}
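
Two replacement idioms sit side by side in this file: CellUtil.cloneX for a detached copy, and the six-argument Bytes.equals for a comparison with no allocation at all. The latter, wrapped as a helper for readability (helper and class names invented, not part of the patch):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CellEqualsSketch {
      // Compare a cell's row to an expected byte[] without cloning anything.
      static boolean rowEquals(Cell c, byte[] expected) {
        return Bytes.equals(c.getRowArray(), c.getRowOffset(), c.getRowLength(),
            expected, 0, expected.length);
      }
    }
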
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
index c46056d8536..9be2f6423e8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
@@ -27,6 +27,7 @@ import java.util.ConcurrentModificationException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -106,8 +107,8 @@ public class TestPutDeleteEtcCellIteration {
Cell cell = cellScanner.current();
byte [] bytes = Bytes.toBytes(index++);
KeyValue kv = (KeyValue)cell;
- assertTrue(Bytes.equals(kv.getFamily(), bytes));
- assertTrue(Bytes.equals(kv.getValue(), bytes));
+ assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), bytes));
+ assertTrue(Bytes.equals(CellUtil.cloneValue(kv), bytes));
}
assertEquals(COUNT, index);
}
@@ -125,8 +126,8 @@ public class TestPutDeleteEtcCellIteration {
int value = index;
byte [] bytes = Bytes.toBytes(index++);
KeyValue kv = (KeyValue)cell;
- assertTrue(Bytes.equals(kv.getFamily(), bytes));
- long a = Bytes.toLong(kv.getValue());
+ assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), bytes));
+ long a = Bytes.toLong(CellUtil.cloneValue(kv));
assertEquals(value, a);
}
assertEquals(COUNT, index);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
index dd8c7b4c752..6e1a03833d6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
@@ -20,19 +20,19 @@
package org.apache.hadoop.hbase.coprocessor;
import java.io.IOException;
-import java.util.List;
import java.util.Arrays;
+import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-
import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
-import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.wal.WALKey;
/**
* Class for testing WALObserver coprocessor.
@@ -119,8 +119,8 @@ implements WALObserver {
Cell deletedCell = null;
for (Cell cell : cells) {
// assume only one kv from the WALEdit matches.
- byte[] family = cell.getFamily();
- byte[] qulifier = cell.getQualifier();
+ byte[] family = CellUtil.cloneFamily(cell);
+ byte[] qualifier = CellUtil.cloneQualifier(cell);
if (Arrays.equals(family, ignoredFamily) &&
Arrays.equals(qualifier, ignoredQualifier)) {
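
Since this observer clones family and qualifier only to compare them, the in-place CellUtil.matchingColumn helper would do the same job with no copies; a sketch (class name illustrative):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;

    public class IgnoredColumnSketch {
      // Family and qualifier compared in place, no byte[] copies.
      static boolean isIgnored(Cell cell, byte[] ignoredFamily, byte[] ignoredQualifier) {
        return CellUtil.matchingColumn(cell, ignoredFamily, ignoredQualifier);
      }
    }
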
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
index 601db766024..6707354976c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
@@ -66,10 +66,10 @@ import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.StoreFile.Reader;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
-import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.wal.WALKey;
import com.google.common.collect.ImmutableList;
@@ -221,13 +221,13 @@ public class SimpleRegionObserver extends BaseRegionObserver {
List<Mutation> metaEntries) throws IOException {
ctPreSplitBeforePONR.incrementAndGet();
}
-
+
@Override
public void preSplitAfterPONR(
ObserverContext<RegionCoprocessorEnvironment> ctx) throws IOException {
ctPreSplitAfterPONR.incrementAndGet();
}
-
+
@Override
public void postSplit(ObserverContext<RegionCoprocessorEnvironment> c, Region l, Region r) {
ctPostSplit.incrementAndGet();
@@ -370,7 +370,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
}
@Override
- public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c,
+ public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c,
final Put put, final WALEdit edit,
final Durability durability) throws IOException {
Map<byte[], List<Cell>> familyMap = put.getFamilyCellMap();
@@ -384,20 +384,23 @@ public class SimpleRegionObserver extends BaseRegionObserver {
assertNotNull(cells);
assertNotNull(cells.get(0));
KeyValue kv = (KeyValue)cells.get(0);
- assertTrue(Bytes.equals(kv.getQualifier(),
- TestRegionObserverInterface.A));
+ assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+ kv.getQualifierLength(), TestRegionObserverInterface.A, 0,
+ TestRegionObserverInterface.A.length));
cells = familyMap.get(TestRegionObserverInterface.B);
assertNotNull(cells);
assertNotNull(cells.get(0));
kv = (KeyValue)cells.get(0);
- assertTrue(Bytes.equals(kv.getQualifier(),
- TestRegionObserverInterface.B));
+ assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+ kv.getQualifierLength(), TestRegionObserverInterface.B, 0,
+ TestRegionObserverInterface.B.length));
cells = familyMap.get(TestRegionObserverInterface.C);
assertNotNull(cells);
assertNotNull(cells.get(0));
kv = (KeyValue)cells.get(0);
- assertTrue(Bytes.equals(kv.getQualifier(),
- TestRegionObserverInterface.C));
+ assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+ kv.getQualifierLength(), TestRegionObserverInterface.C, 0,
+ TestRegionObserverInterface.C.length));
}
ctPrePut.incrementAndGet();
}
@@ -418,25 +421,31 @@ public class SimpleRegionObserver extends BaseRegionObserver {
assertNotNull(cells.get(0));
// KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO
KeyValue kv = (KeyValue)cells.get(0);
- assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.A));
+ assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+ kv.getQualifierLength(), TestRegionObserverInterface.A, 0,
+ TestRegionObserverInterface.A.length));
cells = familyMap.get(TestRegionObserverInterface.B);
assertNotNull(cells);
assertNotNull(cells.get(0));
// KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO
kv = (KeyValue)cells.get(0);
- assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.B));
+ assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+ kv.getQualifierLength(), TestRegionObserverInterface.B, 0,
+ TestRegionObserverInterface.B.length));
cells = familyMap.get(TestRegionObserverInterface.C);
assertNotNull(cells);
assertNotNull(cells.get(0));
// KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO
kv = (KeyValue)cells.get(0);
- assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.C));
+ assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+ kv.getQualifierLength(), TestRegionObserverInterface.C, 0,
+ TestRegionObserverInterface.C.length));
}
ctPostPut.incrementAndGet();
}
@Override
- public void preDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
+ public void preDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
final Delete delete, final WALEdit edit,
final Durability durability) throws IOException {
Map<byte[], List<Cell>> familyMap = delete.getFamilyCellMap();
@@ -456,7 +465,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
}
@Override
- public void postDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
+ public void postDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
final Delete delete, final WALEdit edit,
final Durability durability) throws IOException {
Map<byte[], List<Cell>> familyMap = delete.getFamilyCellMap();
@@ -467,7 +476,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
ctBeforeDelete.set(0);
ctPostDeleted.incrementAndGet();
}
-
+
@Override
public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException {
@@ -604,7 +613,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
}
@Override
- public Result preAppendAfterRowLock(ObserverContext<RegionCoprocessorEnvironment> e,
+ public Result preAppendAfterRowLock(ObserverContext<RegionCoprocessorEnvironment> e,
Append append) throws IOException {
ctPreAppendAfterRowLock.incrementAndGet();
return null;
@@ -724,7 +733,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
public boolean hadPostPut() {
return ctPostPut.get() > 0;
}
-
+
public boolean hadPreBatchMutate() {
return ctPreBatchMutate.get() > 0;
}
@@ -784,7 +793,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
public boolean hadPreIncrement() {
return ctPreIncrement.get() > 0;
}
-
+
public boolean hadPreIncrementAfterRowLock() {
return ctPreIncrementAfterRowLock.get() > 0;
}
@@ -808,7 +817,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
public boolean hadPrePreparedDeleteTS() {
return ctPrePrepareDeleteTS.get() > 0;
}
-
+
public boolean hadPreWALRestore() {
return ctPreWALRestore.get() > 0;
}
@@ -874,7 +883,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
public int getCtPreSplit() {
return ctPreSplit.get();
}
-
+
public int getCtPreSplitBeforePONR() {
return ctPreSplitBeforePONR.get();
}
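
The three near-identical ranged comparisons repeated in prePut and postPut above are a natural candidate for a small test helper; a sketch of one (name invented, not part of the patch):

    import static org.junit.Assert.assertTrue;

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.util.Bytes;

    public class QualifierAssertSketch {
      // One assertion in place of the repeated six-argument Bytes.equals calls.
      static void assertQualifier(Cell kv, byte[] expected) {
        assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
            kv.getQualifierLength(), expected, 0, expected.length));
      }
    }
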
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
index cdcdeed391a..a3c106dc9a4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -51,11 +52,6 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.wal.DefaultWALProvider;
-import org.apache.hadoop.hbase.wal.WAL;
-import org.apache.hadoop.hbase.wal.WALFactory;
-import org.apache.hadoop.hbase.wal.WALKey;
-import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -63,14 +59,19 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdge;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.wal.DefaultWALProvider;
+import org.apache.hadoop.hbase.wal.WAL;
+import org.apache.hadoop.hbase.wal.WALFactory;
+import org.apache.hadoop.hbase.wal.WALKey;
+import org.apache.hadoop.hbase.wal.WALSplitter;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
-import org.junit.rules.TestName;
import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
/**
* Tests invocation of the
@@ -216,14 +217,14 @@ public class TestWALObserver {
List<Cell> cells = edit.getCells();
for (Cell cell : cells) {
- if (Arrays.equals(cell.getFamily(), TEST_FAMILY[0])) {
+ if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[0])) {
foundFamily0 = true;
}
- if (Arrays.equals(cell.getFamily(), TEST_FAMILY[2])) {
+ if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[2])) {
foundFamily2 = true;
}
- if (Arrays.equals(cell.getFamily(), TEST_FAMILY[1])) {
- if (!Arrays.equals(cell.getValue(), TEST_VALUE[1])) {
+ if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[1])) {
+ if (!Arrays.equals(CellUtil.cloneValue(cell), TEST_VALUE[1])) {
modifiedFamily1 = true;
}
}
@@ -244,14 +245,14 @@ public class TestWALObserver {
foundFamily2 = false;
modifiedFamily1 = false;
for (Cell cell : cells) {
- if (Arrays.equals(cell.getFamily(), TEST_FAMILY[0])) {
+ if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[0])) {
foundFamily0 = true;
}
- if (Arrays.equals(cell.getFamily(), TEST_FAMILY[2])) {
+ if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[2])) {
foundFamily2 = true;
}
- if (Arrays.equals(cell.getFamily(), TEST_FAMILY[1])) {
- if (!Arrays.equals(cell.getValue(), TEST_VALUE[1])) {
+ if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[1])) {
+ if (!Arrays.equals(CellUtil.cloneValue(cell), TEST_VALUE[1])) {
modifiedFamily1 = true;
}
}
@@ -462,7 +463,7 @@ public class TestWALObserver {
/*
* Creates an HRI around an HTD that has tableName and three
* column families named.
- *
+ *
* @param tableName Name of table to use when we create HTableDescriptor.
*/
private HRegionInfo createBasic3FamilyHRegionInfo(final String tableName) {
@@ -496,7 +497,7 @@ public class TestWALObserver {
/**
* Copied from HRegion.
- *
+ *
* @param familyMap
* map of family->edits
* @param walEdit
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index 3601b0115d2..92be81a9f31 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -534,7 +534,7 @@ public class TestFilter {
ArrayList<Cell> values = new ArrayList<Cell>();
boolean isMoreResults = scanner.next(values);
if (!isMoreResults
- || !Bytes.toString(values.get(0).getRow()).startsWith(prefix)) {
+ || !Bytes.toString(CellUtil.cloneRow(values.get(0))).startsWith(prefix)) {
Assert.assertTrue(
"The WhileMatchFilter should now filter all remaining",
filter.filterAllRemaining());
@@ -581,7 +581,7 @@ public class TestFilter {
/**
- * The following filter simulates a pre-0.96 filter where filterRow() is defined while
+ * The following filter simulates a pre-0.96 filter where filterRow() is defined while
* hasFilterRow() returns false
*/
static class OldTestFilter extends FilterBase {
@@ -592,25 +592,25 @@ public class TestFilter {
public boolean hasFilterRow() {
return false;
}
-
+
@Override
public boolean filterRow() {
// always filter out rows
return true;
}
-
+
@Override
public ReturnCode filterKeyValue(Cell ignored) throws IOException {
return ReturnCode.INCLUDE;
}
}
-
+
/**
- * The following test is to ensure old(such as hbase0.94) filterRow() can be correctly fired in
- * 0.96+ code base.
- *
+ * The following test is to ensure that old (e.g. HBase 0.94) filterRow() implementations can be
+ * correctly fired in the 0.96+ code base.
+ *
* See HBASE-10366
- *
+ *
* @throws Exception
*/
@Test
@@ -1558,7 +1558,7 @@ public class TestFilter {
};
for(KeyValue kv : srcKVs) {
- Put put = new Put(kv.getRow()).add(kv);
+ Put put = new Put(CellUtil.cloneRow(kv)).add(kv);
put.setDurability(Durability.SKIP_WAL);
this.region.put(put);
}
@@ -1597,7 +1597,7 @@ public class TestFilter {
// Add QUALIFIERS_ONE[1] to ROWS_THREE[0] with VALUES[0]
KeyValue kvA = new KeyValue(ROWS_THREE[0], FAMILIES[0], QUALIFIERS_ONE[1], VALUES[0]);
- this.region.put(new Put(kvA.getRow()).add(kvA));
+ this.region.put(new Put(CellUtil.cloneRow(kvA)).add(kvA));
// Match VALUES[1] against QUALIFIERS_ONE[1] with filterIfMissing = true
// Expect 1 row (3)
@@ -1971,7 +1971,7 @@ public class TestFilter {
verifyScanFullNoValues(s, expectedKVs, useLen);
}
}
-
+
/**
* Filter which sleeps for a second between each row of a scan.
* This can be useful for manual testing of bugs like HBASE-5973. For example:
@@ -1984,7 +1984,7 @@ public class TestFilter {
*/
public static class SlowScanFilter extends FilterBase {
private static Thread ipcHandlerThread = null;
-
+
@Override
public byte [] toByteArray() {return null;}
@@ -2099,5 +2099,5 @@ public class TestFilter {
WAL wal = ((HRegion)testRegion).getWAL();
((HRegion)testRegion).close();
wal.close();
- }
+ }
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
index 8854efeb2b3..440c9f56c65 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
@@ -266,7 +266,7 @@ public class TestFilterList {
byte[] r1 = Bytes.toBytes("Row1");
byte[] r11 = Bytes.toBytes("Row11");
byte[] r2 = Bytes.toBytes("Row2");
-
+
FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ONE);
flist.addFilter(new PrefixFilter(r1));
flist.filterRowKey(KeyValueUtil.createFirstOnRow(r1));
@@ -276,7 +276,7 @@ public class TestFilterList {
flist.reset();
flist.filterRowKey(KeyValueUtil.createFirstOnRow(r2));
assertEquals(flist.filterKeyValue(new KeyValue(r2,r2,r2)), ReturnCode.SKIP);
-
+
flist = new FilterList(FilterList.Operator.MUST_PASS_ONE);
flist.addFilter(new AlwaysNextColFilter());
flist.addFilter(new PrefixFilter(r1));
@@ -298,7 +298,7 @@ public class TestFilterList {
byte[] r1 = Bytes.toBytes("Row1");
byte[] r11 = Bytes.toBytes("Row11");
byte[] r2 = Bytes.toBytes("Row2");
-
+
FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ONE);
flist.addFilter(new AlwaysNextColFilter());
flist.addFilter(new InclusiveStopFilter(r1));
@@ -390,7 +390,7 @@ public class TestFilterList {
Arrays.asList(new Filter[] { includeFilter, alternateIncludeFilter, alternateFilter }));
// INCLUDE, INCLUDE, INCLUDE_AND_NEXT_COL.
assertEquals(Filter.ReturnCode.INCLUDE_AND_NEXT_COL, mpOnefilterList.filterKeyValue(null));
- // INCLUDE, SKIP, INCLUDE.
+ // INCLUDE, SKIP, INCLUDE.
assertEquals(Filter.ReturnCode.INCLUDE, mpOnefilterList.filterKeyValue(null));
// Check must pass all filter.
@@ -398,7 +398,7 @@ public class TestFilterList {
Arrays.asList(new Filter[] { includeFilter, alternateIncludeFilter, alternateFilter }));
// INCLUDE, INCLUDE, INCLUDE_AND_NEXT_COL.
assertEquals(Filter.ReturnCode.INCLUDE_AND_NEXT_COL, mpAllfilterList.filterKeyValue(null));
- // INCLUDE, SKIP, INCLUDE.
+ // INCLUDE, SKIP, INCLUDE.
assertEquals(Filter.ReturnCode.SKIP, mpAllfilterList.filterKeyValue(null));
}
@@ -417,7 +417,7 @@ public class TestFilterList {
public byte [] toByteArray() {
return null;
}
-
+
@Override
public ReturnCode filterKeyValue(Cell ignored) throws IOException {
return ReturnCode.INCLUDE;
@@ -541,12 +541,13 @@ public class TestFilterList {
// Value for fam:qual1 should be stripped:
assertEquals(Filter.ReturnCode.INCLUDE, flist.filterKeyValue(kvQual1));
final KeyValue transformedQual1 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual1));
- assertEquals(0, transformedQual1.getValue().length);
+ assertEquals(0, transformedQual1.getValueLength());
// Value for fam:qual2 should not be stripped:
assertEquals(Filter.ReturnCode.INCLUDE, flist.filterKeyValue(kvQual2));
final KeyValue transformedQual2 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual2));
- assertEquals("value", Bytes.toString(transformedQual2.getValue()));
+ assertEquals("value", Bytes.toString(transformedQual2.getValueArray(),
+ transformedQual2.getValueOffset(), transformedQual2.getValueLength()));
// Other keys should be skipped:
assertEquals(Filter.ReturnCode.SKIP, flist.filterKeyValue(kvQual3));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
index 5d7fa3d4dc9..0e5f08e086a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
@@ -102,7 +103,7 @@ public class TestHalfStoreFileReader {
HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf);
r.loadFileInfo();
Cell midKV = r.midkey();
- byte[] midkey = ((KeyValue.KeyOnlyKeyValue)midKV).getRow();
+ byte[] midkey = CellUtil.cloneRow(midKV);
//System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));
@@ -167,7 +168,7 @@ public class TestHalfStoreFileReader {
HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf);
r.loadFileInfo();
Cell midKV = r.midkey();
- byte[] midkey = ((KeyValue.KeyOnlyKeyValue)midKV).getRow();
+ byte[] midkey = CellUtil.cloneRow(midKV);
Reference bottom = new Reference(midkey, Reference.Range.bottom);
Reference top = new Reference(midkey, Reference.Range.top);
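
The cast dropped here is the point: cloneRow reads through the Cell interface, so it no longer matters whether midkey() returns a KeyOnlyKeyValue or any other Cell implementation. In sketch form (class and method names invented):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;

    public class MidkeySketch {
      // Works for any Cell, KeyOnlyKeyValue included.
      static byte[] splitRow(Cell midKV) {
        return CellUtil.cloneRow(midKV);
      }
    }
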
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
index 2d478a43c46..91115c1dfc6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java
@@ -37,6 +37,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag;
@@ -74,7 +75,7 @@ public class TestPrefixTreeEncoding {
CellComparator.COMPARATOR);
private static boolean formatRowNum = false;
-
+
@Parameters
public static Collection