diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java index e6d6f43d1ea..11c8ff8ae2e 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestPayloadCarryingRpcController.java @@ -138,12 +138,6 @@ public class TestPayloadCarryingRpcController { return 0; } - @Override - public long getMvccVersion() { - // unused - return 0; - } - @Override public long getSequenceId() { // unused @@ -182,30 +176,6 @@ public class TestPayloadCarryingRpcController { // unused return null; } - - @Override - public byte[] getValue() { - // unused - return null; - } - - @Override - public byte[] getFamily() { - // unused - return null; - } - - @Override - public byte[] getQualifier() { - // unused - return null; - } - - @Override - public byte[] getRow() { - // unused - return null; - } }; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java index 46ebeeb64a1..5673ec9ef11 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java @@ -44,12 +44,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * the goal of sorting newer cells first. *
*
- * This interface should not include methods that allocate new byte[]'s such as those used in client - * or debugging code. These users should use the methods found in the {@link CellUtil} class. - * Currently for to minimize the impact of existing applications moving between 0.94 and 0.96, we - * include the costly helper methods marked as deprecated. - *
- *
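The guidance being removed above still describes the intended usage split: copy-free reads go through the (array, offset, length) accessors that remain on Cell, and explicit copies go through {@link CellUtil}. A minimal sketch of the two styles against the surviving API (the class and method names below are illustrative, not part of this patch):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class CellAccessStyles {
  // Copy-free, server-side style: read the row straight out of the
  // backing array using offset and length, allocating nothing.
  static String rowAsString(Cell cell) {
    return Bytes.toStringBinary(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
  }

  // Client-side style: when a standalone byte[] is genuinely needed,
  // clone through CellUtil instead of the removed Cell#getRow().
  static byte[] rowCopy(Cell cell) {
    return CellUtil.cloneRow(cell);
  }
}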
* Cell implements Comparable<Cell> which is only meaningful when * comparing to other keys in the * same table. It uses CellComparator which does not work on the -ROOT- and hbase:meta tables. @@ -146,19 +140,7 @@ public interface Cell { byte getTypeByte(); - //6) MvccVersion - - /** - * @deprecated as of 1.0, use {@link Cell#getSequenceId()} - * - * Internal use only. A region-specific sequence ID given to each operation. It always exists for - * cells in the memstore but is not retained forever. It may survive several flushes, but - * generally becomes irrelevant after the cell's row is no longer involved in any operations that - * require strict consistency. - * @return mvccVersion (always >= 0 if exists), or 0 if it no longer exists - */ - @Deprecated - long getMvccVersion(); + //6) SequenceId /** * A region-specific unique monotonically increasing sequence ID given to each Cell. It always @@ -187,7 +169,7 @@ public interface Cell { * @return Number of value bytes. Must be < valueArray.length - offset. */ int getValueLength(); - + /** * @return the tags byte array */ @@ -202,44 +184,4 @@ public interface Cell { * @return the total length of the tags in the Cell. */ int getTagsLength(); - - /** - * WARNING do not use, expensive. This gets an arraycopy of the cell's value. - * - * Added to ease transition from 0.94 -> 0.96. - * - * @deprecated as of 0.96, use {@link CellUtil#cloneValue(Cell)} - */ - @Deprecated - byte[] getValue(); - - /** - * WARNING do not use, expensive. This gets an arraycopy of the cell's family. - * - * Added to ease transition from 0.94 -> 0.96. - * - * @deprecated as of 0.96, use {@link CellUtil#cloneFamily(Cell)} - */ - @Deprecated - byte[] getFamily(); - - /** - * WARNING do not use, expensive. This gets an arraycopy of the cell's qualifier. - * - * Added to ease transition from 0.94 -> 0.96. - * - * @deprecated as of 0.96, use {@link CellUtil#cloneQualifier(Cell)} - */ - @Deprecated - byte[] getQualifier(); - - /** - * WARNING do not use, expensive. this gets an arraycopy of the cell's row. - * - * Added to ease transition from 0.94 -> 0.96. 
- * - * @deprecated as of 0.96, use {@link CellUtil#getRowByte(Cell, int)} - */ - @Deprecated - byte[] getRow(); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index a9b59e525bb..207f2752cff 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -1335,11 +1335,6 @@ public final class CellUtil { return 0; } - @Override - public long getMvccVersion() { - return getSequenceId(); - } - @Override public long getSequenceId() { return 0; @@ -1374,26 +1369,6 @@ public final class CellUtil { public int getTagsLength() { return 0; } - - @Override - public byte[] getValue() { - return EMPTY_BYTE_ARRAY; - } - - @Override - public byte[] getFamily() { - return EMPTY_BYTE_ARRAY; - } - - @Override - public byte[] getQualifier() { - return EMPTY_BYTE_ARRAY; - } - - @Override - public byte[] getRow() { - return EMPTY_BYTE_ARRAY; - } } @InterfaceAudience.Private @@ -1432,11 +1407,6 @@ public final class CellUtil { public byte getTypeByte() { return Type.Maximum.getCode(); } - - @Override - public byte[] getRow() { - return Bytes.copy(this.rowArray, this.roffset, this.rlength); - } } @InterfaceAudience.Private @@ -1488,16 +1458,6 @@ public final class CellUtil { public int getQualifierLength() { return this.qlength; } - - @Override - public byte[] getFamily() { - return Bytes.copy(this.fArray, this.foffset, this.flength); - } - - @Override - public byte[] getQualifier() { - return Bytes.copy(this.qArray, this.qoffset, this.qlength); - } } @InterfaceAudience.Private @@ -1553,11 +1513,6 @@ public final class CellUtil { public byte getTypeByte() { return Type.Minimum.getCode(); } - - @Override - public byte[] getRow() { - return Bytes.copy(this.rowArray, this.roffset, this.rlength); - } } @InterfaceAudience.Private @@ -1609,15 +1564,5 @@ public final class CellUtil { public int getQualifierLength() { return this.qlength; } - - @Override - public byte[] getFamily() { - return Bytes.copy(this.fArray, this.foffset, this.flength); - } - - @Override - public byte[] getQualifier() { - return Bytes.copy(this.qArray, this.qoffset, this.qlength); - } } } \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index 2fc79754077..8c739848fa3 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -50,12 +50,13 @@ import com.google.common.annotations.VisibleForTesting; /** * An HBase Key/Value. This is the fundamental HBase Type. *

- * HBase applications and users should use the Cell interface and avoid directly using KeyValue - * and member functions not defined in Cell. + * HBase applications and users should use the Cell interface and avoid directly using KeyValue and + * member functions not defined in Cell. *

- * If being used client-side, the primary methods to access individual fields are {@link #getRow()}, - * {@link #getFamily()}, {@link #getQualifier()}, {@link #getTimestamp()}, and {@link #getValue()}. - * These methods allocate new byte arrays and return copies. Avoid their use server-side. + * If being used client-side, the primary methods to access individual fields are + * {@link #getRowArray()}, {@link #getFamilyArray()}, {@link #getQualifierArray()}, + * {@link #getTimestamp()}, and {@link #getValueArray()}. Unlike the removed accessors, these + * methods return the backing byte array rather than a copy; pair them with the corresponding + * offset and length accessors, and use the {@link CellUtil} clone methods when a standalone + * copy is needed. *

* Instances of this class are immutable. They do not implement Comparable but Comparators are * provided. Comparators change with context, whether user table or a catalog table comparison. It's * critical you use the right comparator. Comparators are named for use context. *

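Getting the comparator right matters because hbase:meta row keys embed their own structure; a quick hedged sketch using the META comparator, with row keys borrowed from the TestKeyValue changes further down this patch:

import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ComparatorContext {
  public static void main(String[] args) {
    long now = System.currentTimeMillis();
    // Meta row keys look like "table,startkey,id"; META_COMPARATOR parses
    // that structure, while a plain bytewise comparison would not.
    KeyValue a = new KeyValue(Bytes.toBytes("table,111,aaaa"), now);
    KeyValue b = new KeyValue(Bytes.toBytes("table,2222"), now);
    System.out.println(CellComparator.META_COMPARATOR.compare(a, b) < 0); // prints true
  }
}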
* KeyValue wraps a byte array and takes offsets and lengths into passed array at where to start * interpreting the content as KeyValue. The KeyValue format inside a byte array is: - * <keylength> <valuelength> <key> <value> - * Key is further decomposed as: - * <rowlength> <row> <columnfamilylength> + * <keylength> <valuelength> <key> <value> Key is further + * decomposed as: <rowlength> <row> <columnfamilylength> * <columnfamily> <columnqualifier> - * <timestamp> <keytype> - * The rowlength maximum is Short.MAX_SIZE, column family length maximum - * is Byte.MAX_SIZE, and column qualifier + key length must be < - * Integer.MAX_SIZE. The column does not contain the family/qualifier delimiter, - * {@link #COLUMN_FAMILY_DELIMITER}
+ * <timestamp> <keytype> The rowlength maximum is + * Short.MAX_SIZE, column family length maximum is Byte.MAX_SIZE, and + * column qualifier + key length must be < Integer.MAX_SIZE. The column does not + * contain the family/qualifier delimiter, {@link #COLUMN_FAMILY_DELIMITER}
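The layout paragraph above can be verified directly against a KeyValue's backing array; a minimal sketch (field offsets follow the format just described, accessors as used elsewhere in this patch):

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class KeyValueLayout {
  public static void main(String[] args) {
    KeyValue kv = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), 1L, Bytes.toBytes("value"));
    byte[] b = kv.getBuffer();
    int off = kv.getOffset();
    int keyLength = Bytes.toInt(b, off);         // <keylength>, 4 bytes
    int valueLength = Bytes.toInt(b, off + 4);   // <valuelength>, 4 bytes
    short rowLength = Bytes.toShort(b, off + 8); // the key opens with <rowlength>
    // key = 2 (rowlength) + 4 (row) + 1 (familylength) + 1 (family)
    //     + 1 (qualifier) + 8 (timestamp) + 1 (keytype) = 18 bytes
    System.out.println(keyLength + "/" + valueLength + "/" + rowLength); // prints 18/5/4
  }
}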
* KeyValue can optionally contain Tags. When it contains tags, it is added in the byte array after * the value part. The format for this part is: <tagslength><tagsbytes>. * tagslength maximum is Short.MAX_SIZE. The tagsbytes * contain one or more tags where as each tag is of the form - * <taglength><tagtype><tagbytes>. - * tagtype is one byte and - * taglength maximum is Short.MAX_SIZE and it includes 1 byte type length - * and actual tag bytes length. + * <taglength><tagtype><tagbytes>. tagtype is one byte + * and taglength maximum is Short.MAX_SIZE and it includes 1 byte type + * length and actual tag bytes length. */ @InterfaceAudience.Private public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, @@ -296,12 +294,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, /** Here be dragons **/ - // used to achieve atomic operations in the memstore. - @Override - public long getMvccVersion() { - return this.getSequenceId(); - } - /** * used to achieve atomic operations in the memstore. */ @@ -1172,9 +1164,11 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, */ public Map toStringMap() { Map stringMap = new HashMap(); - stringMap.put("row", Bytes.toStringBinary(getRow())); - stringMap.put("family", Bytes.toStringBinary(getFamily())); - stringMap.put("qualifier", Bytes.toStringBinary(getQualifier())); + stringMap.put("row", Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength())); + stringMap.put("family", + Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength())); + stringMap.put("qualifier", + Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(), getQualifierLength())); stringMap.put("timestamp", getTimestamp()); stringMap.put("vlen", getValueLength()); List tags = getTags(); @@ -1472,10 +1466,9 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, //--------------------------------------------------------------------------- /** - * Do not use unless you have to. Used internally for compacting and testing. - * - * Use {@link #getRow()}, {@link #getFamily()}, {@link #getQualifier()}, and - * {@link #getValue()} if accessing a KeyValue client-side. + * Do not use unless you have to. Used internally for compacting and testing. Use + * {@link #getRowArray()}, {@link #getFamilyArray()}, {@link #getQualifierArray()}, and + * {@link #getValueArray()} if accessing a KeyValue client-side. * @return Copy of the key portion only. */ public byte [] getKey() { @@ -1485,33 +1478,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, return key; } - /** - * Returns value in a new byte array. - * Primarily for use client-side. If server-side, use - * {@link #getBuffer()} with appropriate offsets and lengths instead to - * save on allocations. - * @return Value in a new byte array. - */ - @Override - @Deprecated // use CellUtil.getValueArray() - public byte [] getValue() { - return CellUtil.cloneValue(this); - } - - /** - * Primarily for use client-side. Returns the row of this KeyValue in a new - * byte array.

- * - * If server-side, use {@link #getBuffer()} with appropriate offsets and - * lengths instead. - * @return Row in a new byte array. - */ - @Override - @Deprecated // use CellUtil.getRowArray() - public byte [] getRow() { - return CellUtil.cloneRow(this); - } - /** * * @return Timestamp @@ -1556,35 +1522,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, return KeyValue.isDelete(getType()); } - /** - * Primarily for use client-side. Returns the family of this KeyValue in a - * new byte array.

- * - * If server-side, use {@link #getBuffer()} with appropriate offsets and - * lengths instead. - * @return Returns family. Makes a copy. - */ - @Override - @Deprecated // use CellUtil.getFamilyArray - public byte [] getFamily() { - return CellUtil.cloneFamily(this); - } - - /** - * Primarily for use client-side. Returns the column qualifier of this - * KeyValue in a new byte array.

- * - * If server-side, use {@link #getBuffer()} with appropriate offsets and - * lengths instead. - * Use {@link #getBuffer()} with appropriate offsets and lengths instead. - * @return Returns qualifier. Makes a copy. - */ - @Override - @Deprecated // use CellUtil.getQualifierArray - public byte [] getQualifier() { - return CellUtil.cloneQualifier(this); - } - /** * This returns the offset where the tag actually starts. */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java index 50a409d8a6b..0de627a6451 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java @@ -73,9 +73,9 @@ public class KeyValueTestUtil { /** * Checks whether KeyValues from kvCollection2 are contained in kvCollection1. - * + * * The comparison is made without distinguishing MVCC version of the KeyValues - * + * * @param kvCollection1 * @param kvCollection2 * @return true if KeyValues from kvCollection2 are contained in kvCollection1 @@ -91,7 +91,7 @@ public class KeyValueTestUtil { } return true; } - + public static List rewindThenToList(final ByteBuffer bb, final boolean includesMemstoreTS, final boolean useTags) { bb.rewind(); @@ -161,15 +161,16 @@ public class KeyValueTestUtil { } protected static String getRowString(final KeyValue kv) { - return Bytes.toStringBinary(kv.getRow()); + return Bytes.toStringBinary(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength()); } protected static String getFamilyString(final KeyValue kv) { - return Bytes.toStringBinary(kv.getFamily()); + return Bytes.toStringBinary(kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength()); } protected static String getQualifierString(final KeyValue kv) { - return Bytes.toStringBinary(kv.getQualifier()); + return Bytes.toStringBinary(kv.getQualifierArray(), kv.getQualifierOffset(), + kv.getQualifierLength()); } protected static String getTimestampString(final KeyValue kv) { @@ -177,11 +178,11 @@ public class KeyValueTestUtil { } protected static String getTypeString(final KeyValue kv) { - return KeyValue.Type.codeToType(kv.getType()).toString(); + return KeyValue.Type.codeToType(kv.getTypeByte()).toString(); } protected static String getValueString(final KeyValue kv) { - return Bytes.toStringBinary(kv.getValue()); + return Bytes.toStringBinary(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java index 74a26a27a6b..59519e0c641 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java @@ -81,7 +81,7 @@ public class KeyValueUtil { public static int lengthWithMvccVersion(final KeyValue kv, final boolean includeMvccVersion) { int length = kv.getLength(); if (includeMvccVersion) { - length += WritableUtils.getVIntSize(kv.getMvccVersion()); + length += WritableUtils.getVIntSize(kv.getSequenceId()); } return length; } @@ -101,7 +101,7 @@ public class KeyValueUtil { public static KeyValue copyToNewKeyValue(final Cell cell) { byte[] bytes = copyToNewByteArray(cell); KeyValue kvCell = new KeyValue(bytes, 0, bytes.length); - kvCell.setSequenceId(cell.getMvccVersion()); + kvCell.setSequenceId(cell.getSequenceId()); return kvCell; } @@ -173,9 +173,9 @@ public class KeyValueUtil 
{ bb.limit(bb.position() + kv.getLength()); bb.put(kv.getBuffer(), kv.getOffset(), kv.getLength()); if (includeMvccVersion) { - int numMvccVersionBytes = WritableUtils.getVIntSize(kv.getMvccVersion()); + int numMvccVersionBytes = WritableUtils.getVIntSize(kv.getSequenceId()); ByteBufferUtils.extendLimit(bb, numMvccVersionBytes); - ByteBufferUtils.writeVLong(bb, kv.getMvccVersion()); + ByteBufferUtils.writeVLong(bb, kv.getSequenceId()); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java index 2ed89f17f0f..a54c76eed8f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java @@ -56,7 +56,7 @@ public class CellCodec implements Codec { // Value write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()); // MvccVersion - this.out.write(Bytes.toBytes(cell.getMvccVersion())); + this.out.write(Bytes.toBytes(cell.getSequenceId())); } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java index a614026c1a9..d79be175214 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java @@ -58,7 +58,7 @@ public class CellCodecWithTags implements Codec { // Tags write(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); // MvccVersion - this.out.write(Bytes.toBytes(cell.getMvccVersion())); + this.out.write(Bytes.toBytes(cell.getSequenceId())); } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java index 5107271859d..03875dc3b6a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java @@ -24,13 +24,12 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; -import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.hadoop.hbase.Streamable; import org.apache.hadoop.hbase.SettableSequenceId; +import org.apache.hadoop.hbase.Streamable; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.ByteBufferOutputStream; import org.apache.hadoop.hbase.io.HeapSize; @@ -256,11 +255,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { return currentKey.getTypeByte(); } - @Override - public long getMvccVersion() { - return memstoreTS; - } - @Override public long getSequenceId() { return memstoreTS; @@ -302,30 +296,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { return tagsLength; } - @Override - @Deprecated - public byte[] getValue() { - throw new UnsupportedOperationException("getValue() not supported"); - } - - @Override - @Deprecated - public byte[] getFamily() { - throw new UnsupportedOperationException("getFamily() not supported"); - } - - @Override - @Deprecated - public byte[] getQualifier() { - throw new 
UnsupportedOperationException("getQualifier() not supported"); - } - - @Override - @Deprecated - public byte[] getRow() { - throw new UnsupportedOperationException("getRow() not supported"); - } - @Override public String toString() { return KeyValue.keyToString(this.keyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen=" @@ -334,7 +304,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { public Cell shallowCopy() { return new ClonedSeekerState(currentBuffer, keyBuffer, currentKey.getRowLength(), - currentKey.getFamilyOffset(), currentKey.getFamilyLength(), keyLength, + currentKey.getFamilyOffset(), currentKey.getFamilyLength(), keyLength, currentKey.getQualifierOffset(), currentKey.getQualifierLength(), currentKey.getTimestamp(), currentKey.getTypeByte(), valueLength, valueOffset, memstoreTS, tagsOffset, tagsLength, tagCompressionContext, tagsBuffer); @@ -342,9 +312,9 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { } /** - * Copies only the key part of the keybuffer by doing a deep copy and passes the + * Copies only the key part of the keybuffer by doing a deep copy and passes the * seeker state members for taking a clone. - * Note that the value byte[] part is still pointing to the currentBuffer and the + * Note that the value byte[] part is still pointing to the currentBuffer and the * represented by the valueOffset and valueLength */ // We return this as a Cell to the upper layers of read flow and might try setting a new SeqId @@ -372,7 +342,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { private byte[] cloneTagsBuffer; private long seqId; private TagCompressionContext tagCompressionContext; - + protected ClonedSeekerState(ByteBuffer currentBuffer, byte[] keyBuffer, short rowLength, int familyOffset, byte familyLength, int keyLength, int qualOffset, int qualLength, long timeStamp, byte typeByte, int valueLen, int valueOffset, long seqId, @@ -455,12 +425,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { return typeByte; } - @Override - @Deprecated - public long getMvccVersion() { - return getSequenceId(); - } - @Override public long getSequenceId() { return seqId; @@ -502,30 +466,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder { return tagsLength; } - @Override - @Deprecated - public byte[] getValue() { - return CellUtil.cloneValue(this); - } - - @Override - @Deprecated - public byte[] getFamily() { - return CellUtil.cloneFamily(this); - } - - @Override - @Deprecated - public byte[] getQualifier() { - return CellUtil.cloneQualifier(this); - } - - @Override - @Deprecated - public byte[] getRow() { - return CellUtil.cloneRow(this); - } - @Override public String toString() { return KeyValue.keyToString(this.keyOnlyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen=" diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java index 26b513c071d..fa98f70b885 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java @@ -301,7 +301,7 @@ public class RedundantKVGenerator { for (KeyValue kv : keyValues) { totalSize += kv.getLength(); if (includesMemstoreTS) { - totalSize += WritableUtils.getVIntSize(kv.getMvccVersion()); + totalSize += WritableUtils.getVIntSize(kv.getSequenceId()); } } @@ -309,7 +309,7 @@ public class 
RedundantKVGenerator { for (KeyValue kv : keyValues) { result.put(kv.getBuffer(), kv.getOffset(), kv.getLength()); if (includesMemstoreTS) { - ByteBufferUtils.writeVLong(result, kv.getMvccVersion()); + ByteBufferUtils.writeVLong(result, kv.getSequenceId()); } } return result; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java index ed8f901783a..d9e76e6545d 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java @@ -18,7 +18,9 @@ package org.apache.hadoop.hbase; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import java.io.IOException; import java.math.BigDecimal; @@ -154,12 +156,6 @@ public class TestCellUtil { return 0; } - @Override - public long getMvccVersion() { - // TODO Auto-generated method stub - return 0; - } - @Override public byte[] getValueArray() { // TODO Auto-generated method stub @@ -190,30 +186,6 @@ public class TestCellUtil { return 0; } - @Override - public byte[] getValue() { - // TODO Auto-generated method stub - return null; - } - - @Override - public byte[] getFamily() { - // TODO Auto-generated method stub - return null; - } - - @Override - public byte[] getQualifier() { - // TODO Auto-generated method stub - return null; - } - - @Override - public byte[] getRow() { - // TODO Auto-generated method stub - return null; - } - @Override public long getSequenceId() { // TODO Auto-generated method stub @@ -592,11 +564,6 @@ public class TestCellUtil { return KeyValue.Type.Put.getCode(); } - @Override - public long getMvccVersion() { - return 0; - } - @Override public long getSequenceId() { return 0; @@ -639,26 +606,6 @@ public class TestCellUtil { return tagsLen; } - @Override - public byte[] getValue() { - throw new UnsupportedOperationException(); - } - - @Override - public byte[] getFamily() { - throw new UnsupportedOperationException(); - } - - @Override - public byte[] getQualifier() { - throw new UnsupportedOperationException(); - } - - @Override - public byte[] getRow() { - throw new UnsupportedOperationException(); - } - @Override public ByteBuffer getRowByteBuffer() { return this.buffer; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java index 5cb61c39e50..cc1e511f52e 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java @@ -93,7 +93,8 @@ public class TestKeyValue extends TestCase { private void check(final byte [] row, final byte [] family, byte [] qualifier, final long timestamp, final byte [] value) { KeyValue kv = new KeyValue(row, family, qualifier, timestamp, value); - assertTrue(Bytes.compareTo(kv.getRow(), row) == 0); + assertTrue(Bytes.compareTo(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), row, 0, + row.length) == 0); assertTrue(CellUtil.matchingColumn(kv, family, qualifier)); // Call toString to make sure it works. 
LOG.info(kv.toString()); @@ -390,9 +391,10 @@ public class TestKeyValue extends TestCase { // keys are still the same assertTrue(kv1.equals(kv1ko)); // but values are not - assertTrue(kv1ko.getValue().length == (useLen?Bytes.SIZEOF_INT:0)); + assertTrue(kv1ko.getValueLength() == (useLen?Bytes.SIZEOF_INT:0)); if (useLen) { - assertEquals(kv1.getValueLength(), Bytes.toInt(kv1ko.getValue())); + assertEquals(kv1.getValueLength(), + Bytes.toInt(kv1ko.getValueArray(), kv1ko.getValueOffset(), kv1ko.getValueLength())); } } } @@ -442,10 +444,14 @@ public class TestKeyValue extends TestCase { KeyValue kv = new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, new Tag[] { new Tag((byte) 1, metaValue1), new Tag((byte) 2, metaValue2) }); assertTrue(kv.getTagsLength() > 0); - assertTrue(Bytes.equals(kv.getRow(), row)); - assertTrue(Bytes.equals(kv.getFamily(), cf)); - assertTrue(Bytes.equals(kv.getQualifier(), q)); - assertTrue(Bytes.equals(kv.getValue(), value)); + assertTrue(Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), row, 0, + row.length)); + assertTrue(Bytes.equals(kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength(), cf, 0, + cf.length)); + assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(), + kv.getQualifierLength(), q, 0, q.length)); + assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value, 0, + value.length)); List tags = kv.getTags(); assertNotNull(tags); assertEquals(2, tags.size()); @@ -492,7 +498,7 @@ public class TestKeyValue extends TestCase { Bytes.equals(next.getValue(), metaValue2); assertFalse(tagItr.hasNext()); } - + public void testMetaKeyComparator() { CellComparator c = CellComparator.META_COMPARATOR; long now = System.currentTimeMillis(); @@ -500,23 +506,23 @@ public class TestKeyValue extends TestCase { KeyValue a = new KeyValue(Bytes.toBytes("table1"), now); KeyValue b = new KeyValue(Bytes.toBytes("table2"), now); assertTrue(c.compare(a, b) < 0); - + a = new KeyValue(Bytes.toBytes("table1,111"), now); b = new KeyValue(Bytes.toBytes("table2"), now); assertTrue(c.compare(a, b) < 0); - + a = new KeyValue(Bytes.toBytes("table1"), now); b = new KeyValue(Bytes.toBytes("table2,111"), now); assertTrue(c.compare(a, b) < 0); - + a = new KeyValue(Bytes.toBytes("table,111"), now); b = new KeyValue(Bytes.toBytes("table,2222"), now); assertTrue(c.compare(a, b) < 0); - + a = new KeyValue(Bytes.toBytes("table,111,aaaa"), now); b = new KeyValue(Bytes.toBytes("table,2222"), now); assertTrue(c.compare(a, b) < 0); - + a = new KeyValue(Bytes.toBytes("table,111"), now); b = new KeyValue(Bytes.toBytes("table,2222.bbb"), now); assertTrue(c.compare(a, b) < 0); @@ -524,7 +530,7 @@ public class TestKeyValue extends TestCase { a = new KeyValue(Bytes.toBytes("table,,aaaa"), now); b = new KeyValue(Bytes.toBytes("table,111,bbb"), now); assertTrue(c.compare(a, b) < 0); - + a = new KeyValue(Bytes.toBytes("table,111,aaaa"), now); b = new KeyValue(Bytes.toBytes("table,111,bbb"), now); assertTrue(c.compare(a, b) < 0); @@ -532,7 +538,7 @@ public class TestKeyValue extends TestCase { a = new KeyValue(Bytes.toBytes("table,111,xxxx"), now); b = new KeyValue(Bytes.toBytes("table,111,222,bbb"), now); assertTrue(c.compare(a, b) < 0); - + a = new KeyValue(Bytes.toBytes("table,111,11,xxx"), now); b = new KeyValue(Bytes.toBytes("table,111,222,bbb"), now); assertTrue(c.compare(a, b) < 0); @@ -590,12 +596,6 @@ public class TestKeyValue extends TestCase { return this.kv.getTagsOffset(); } - // used to achieve atomic operations 
in the memstore. - @Override - public long getMvccVersion() { - return this.kv.getMvccVersion(); - } - /** * used to achieve atomic operations in the memstore. */ @@ -613,7 +613,7 @@ public class TestKeyValue extends TestCase { } /** - * + * * @return Timestamp */ @Override @@ -729,34 +729,6 @@ public class TestKeyValue extends TestCase { return this.kv.getQualifierLength(); } - @Override - @Deprecated - public byte[] getValue() { - // TODO Auto-generated method stub - return null; - } - - @Override - @Deprecated - public byte[] getFamily() { - // TODO Auto-generated method stub - return null; - } - - @Override - @Deprecated - public byte[] getQualifier() { - // TODO Auto-generated method stub - return null; - } - - @Override - @Deprecated - public byte[] getRow() { - // TODO Auto-generated method stub - return null; - } - /** * @return the backing array of the entire KeyValue (all KeyValue fields are * in a single array) diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java index a6b7cc52cb1..bd2a29d35d4 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java @@ -40,7 +40,7 @@ public class TestByteRangeWithKVSerialization { pbr.put((byte) (tagsLen >> 8 & 0xff)); pbr.put((byte) (tagsLen & 0xff)); pbr.put(kv.getTagsArray(), kv.getTagsOffset(), tagsLen); - pbr.putVLong(kv.getMvccVersion()); + pbr.putVLong(kv.getSequenceId()); } static KeyValue readCell(PositionedByteRange pbr) throws Exception { @@ -88,7 +88,7 @@ public class TestByteRangeWithKVSerialization { Assert.assertTrue(Bytes.equals(kv.getTagsArray(), kv.getTagsOffset(), kv.getTagsLength(), kv1.getTagsArray(), kv1.getTagsOffset(), kv1.getTagsLength())); - Assert.assertEquals(kv1.getMvccVersion(), kv.getMvccVersion()); + Assert.assertEquals(kv1.getSequenceId(), kv.getSequenceId()); } } } \ No newline at end of file diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java index a49d9cad292..adc0eb7581a 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedListWithVisibility.java @@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionLocation; @@ -74,24 +75,24 @@ import org.junit.experimental.categories.Category; /** * IT test used to verify the deletes with visibility labels. - * The test creates three tables tablename_0, tablename_1 and tablename_2 and each table + * The test creates three tables tablename_0, tablename_1 and tablename_2 and each table * is associated with a unique pair of labels. 
- * Another common table with the name 'commontable' is created and it has the data combined - * from all these 3 tables such that there are 3 versions of every row but the visibility label - * in every row corresponds to the table from which the row originated. - * Then deletes are issued to the common table by selecting the visibility label - * associated with each of the smaller tables. - * After the delete is issued with one set of visibility labels we try to scan the common table - * with each of the visibility pairs defined for the 3 tables. - * So after the first delete is issued, a scan with the first set of visibility labels would - * return zero result whereas the scan issued with the other two sets of visibility labels - * should return all the rows corresponding to that set of visibility labels. The above - * process of delete and scan is repeated until after the last set of visibility labels are + * Another common table with the name 'commontable' is created and it has the data combined + * from all these 3 tables such that there are 3 versions of every row but the visibility label + * in every row corresponds to the table from which the row originated. + * Then deletes are issued to the common table by selecting the visibility label + * associated with each of the smaller tables. + * After the delete is issued with one set of visibility labels we try to scan the common table + * with each of the visibility pairs defined for the 3 tables. + * So after the first delete is issued, a scan with the first set of visibility labels would + * return zero result whereas the scan issued with the other two sets of visibility labels + * should return all the rows corresponding to that set of visibility labels. The above + * process of delete and scan is repeated until after the last set of visibility labels are * used for the deletes the common table should not return any row. - * - * To use this + * + * To use this * ./hbase org.apache.hadoop.hbase.test.IntegrationTestBigLinkedListWithVisibility Loop 1 1 20000 /tmp 1 10000 - * or + * or * ./hbase org.apache.hadoop.hbase.IntegrationTestsDriver -r .*IntegrationTestBigLinkedListWithVisibility.* */ @Category(IntegrationTests.class) @@ -211,7 +212,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB for (int j = 0; j < DEFAULT_TABLES_COUNT; j++) { Put put = new Put(current[i]); put.add(FAMILY_NAME, COLUMN_PREV, prev == null ? 
NO_KEY : prev[i]); - + if (count >= 0) { put.add(FAMILY_NAME, COLUMN_COUNT, Bytes.toBytes(count + i)); } @@ -331,7 +332,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB @Override protected void processKV(ImmutableBytesWritable key, Result result, org.apache.hadoop.mapreduce.Mapper.Context context, Put put, - org.apache.hadoop.hbase.client.Delete delete) throws + org.apache.hadoop.hbase.client.Delete delete) throws IOException, InterruptedException { String visibilityExps = split[index * 2] + OR + split[(index * 2) + 1]; for (Cell kv : result.rawCells()) { @@ -343,7 +344,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB delete = new Delete(key.get()); } delete.setCellVisibility(new CellVisibility(visibilityExps)); - delete.deleteFamily(kv.getFamily()); + delete.deleteFamily(CellUtil.cloneFamily(kv)); } if (delete != null) { context.write(key, delete); @@ -356,14 +357,14 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB super.addOptions(); addOptWithArg("u", USER_OPT, "User name"); } - + @Override protected void processOptions(CommandLine cmd) { super.processOptions(cmd); if (cmd.hasOption(USER_OPT)) { userName = cmd.getOptionValue(USER_OPT); } - + } @Override public void setUpCluster() throws Exception { @@ -561,7 +562,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB } } - private void verify(int numReducers, long expectedNumNodes, + private void verify(int numReducers, long expectedNumNodes, Path iterationOutput, Verify verify) throws Exception { verify.setConf(getConf()); int retCode = verify.run(iterationOutput, numReducers); diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java index 7fceaa5465a..d903d790219 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeCodec.java @@ -102,7 +102,7 @@ public class PrefixTreeCodec implements DataBlockEncoder { ByteBufferUtils.skip(result, keyValueLength); offset += keyValueLength; if (includesMvcc) { - ByteBufferUtils.writeVLong(result, currentCell.getMvccVersion()); + ByteBufferUtils.writeVLong(result, currentCell.getSequenceId()); } } result.position(result.limit());//make it appear as if we were appending diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java index 8a45f1312f4..eefd953c36b 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/PrefixTreeSeeker.java @@ -314,12 +314,6 @@ public class PrefixTreeSeeker implements EncodedSeeker { return type; } - @Override - @Deprecated - public long getMvccVersion() { - return getSequenceId(); - } - @Override public long getSequenceId() { return seqId; @@ -355,30 +349,6 @@ public class PrefixTreeSeeker implements EncodedSeeker { return this.tagsLength; } - @Override - @Deprecated - public byte[] getValue() { - return this.val; - } - - @Override - @Deprecated - public byte[] getFamily() { - return this.fam; - } - - @Override - @Deprecated - public byte[] getQualifier() { - return this.qual; - } - 
- @Override - @Deprecated - public byte[] getRow() { - return this.row; - } - @Override public String toString() { String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength()); diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java index f06634c9c38..82d1d7eef6c 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeCell.java @@ -131,18 +131,13 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable d = generator.generateTestKeyValues(numColumns); for (KeyValue col : d) { - ByteRange colRange = new SimpleMutableByteRange(col.getQualifier()); + ByteRange colRange = new SimpleMutableByteRange(CellUtil.cloneQualifier(col)); inputs.add(colRange); sortedColumns.add(colRange); } diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java index ec115511584..0c8caf4dce7 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java @@ -181,7 +181,7 @@ public class TestRowEncoder { // assert keys are equal (doesn't compare values) Assert.assertEquals(expected, actual); if (includeMemstoreTS) { - Assert.assertEquals(expected.getMvccVersion(), actual.getMvccVersion()); + Assert.assertEquals(expected.getSequenceId(), actual.getSequenceId()); } // assert values equal Assert.assertTrue(Bytes.equals(expected.getValueArray(), expected.getValueOffset(), diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java index a12c747f18b..f85d745b8ed 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java @@ -115,7 +115,8 @@ public class RemoteHTable implements Table { if (o instanceof byte[]) { sb.append(Bytes.toStringBinary((byte[])o)); } else if (o instanceof KeyValue) { - sb.append(Bytes.toStringBinary(((KeyValue)o).getQualifier())); + sb.append(Bytes.toStringBinary(((KeyValue) o).getRowArray(), + ((KeyValue) o).getRowOffset(), ((KeyValue) o).getRowLength())); } else { throw new RuntimeException("object type not handled"); } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java index 3acddc1aa30..22ee31d39dd 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java @@ -19,6 +19,10 @@ package org.apache.hadoop.hbase.rest; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + import java.io.ByteArrayInputStream; import java.io.StringWriter; import java.util.ArrayList; @@ -32,16 +36,24 @@ import javax.xml.bind.Unmarshaller; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; 
-import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.BinaryComparator; +import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.filter.FilterList.Operator; import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; import org.apache.hadoop.hbase.filter.InclusiveStopFilter; import org.apache.hadoop.hbase.filter.PageFilter; @@ -52,8 +64,6 @@ import org.apache.hadoop.hbase.filter.RowFilter; import org.apache.hadoop.hbase.filter.SkipFilter; import org.apache.hadoop.hbase.filter.SubstringComparator; import org.apache.hadoop.hbase.filter.ValueFilter; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; -import org.apache.hadoop.hbase.filter.FilterList.Operator; import org.apache.hadoop.hbase.rest.client.Client; import org.apache.hadoop.hbase.rest.client.Cluster; import org.apache.hadoop.hbase.rest.client.Response; @@ -64,9 +74,6 @@ import org.apache.hadoop.hbase.rest.model.ScannerModel; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RestTests; import org.apache.hadoop.hbase.util.Bytes; - -import static org.junit.Assert.*; - import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -108,7 +115,7 @@ public class TestScannersWithFilters { }; private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - private static final HBaseRESTTestingUtility REST_TEST_UTIL = + private static final HBaseRESTTestingUtility REST_TEST_UTIL = new HBaseRESTTestingUtility(); private static Client client; private static JAXBContext context; @@ -128,7 +135,7 @@ public class TestScannersWithFilters { ScannerModel.class); marshaller = context.createMarshaller(); unmarshaller = context.createUnmarshaller(); - client = new Client(new Cluster().add("localhost", + client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort())); Admin admin = TEST_UTIL.getHBaseAdmin(); if (!admin.tableExists(TABLE)) { @@ -154,7 +161,7 @@ public class TestScannersWithFilters { } table.put(p); } - + // Insert second half (reverse families) for(byte [] ROW : ROWS_ONE) { Put p = new Put(ROW); @@ -172,14 +179,14 @@ public class TestScannersWithFilters { } table.put(p); } - + // Delete the second qualifier from all rows and families for(byte [] ROW : ROWS_ONE) { Delete d = new Delete(ROW); d.deleteColumns(FAMILIES[0], QUALIFIERS_ONE[1]); d.deleteColumns(FAMILIES[1], QUALIFIERS_ONE[1]); table.delete(d); - } + } for(byte [] ROW : ROWS_TWO) { Delete d = new Delete(ROW); d.deleteColumns(FAMILIES[0], QUALIFIERS_TWO[1]); @@ -187,7 +194,7 @@ public class TestScannersWithFilters { table.delete(d); } colsPerRow -= 2; - + // Delete the second rows from both groups, one column at a time for(byte [] QUALIFIER : QUALIFIERS_ONE) { 
Delete d = new Delete(ROWS_ONE[1]); @@ -212,7 +219,7 @@ public class TestScannersWithFilters { TEST_UTIL.shutdownMiniCluster(); } - private static void verifyScan(Scan s, long expectedRows, long expectedKeys) + private static void verifyScan(Scan s, long expectedRows, long expectedKeys) throws Exception { ScannerModel model = ScannerModel.fromScan(s); model.setBatch(Integer.MAX_VALUE); // fetch it all at once @@ -234,7 +241,7 @@ public class TestScannersWithFilters { unmarshaller.unmarshal(new ByteArrayInputStream(response.getBody())); int rows = cells.getRows().size(); - assertTrue("Scanned too many rows! Only expected " + expectedRows + + assertTrue("Scanned too many rows! Only expected " + expectedRows + " total but scanned " + rows, expectedRows == rows); for (RowModel row: cells.getRows()) { int count = row.getCells().size(); @@ -247,7 +254,7 @@ public class TestScannersWithFilters { assertEquals(response.getCode(), 200); } - private static void verifyScanFull(Scan s, KeyValue [] kvs) + private static void verifyScanFull(Scan s, KeyValue [] kvs) throws Exception { ScannerModel model = ScannerModel.fromScan(s); model.setBatch(Integer.MAX_VALUE); // fetch it all at once @@ -281,19 +288,19 @@ public class TestScannersWithFilters { RowModel rowModel = i.next(); List cells = rowModel.getCells(); if (cells.isEmpty()) break; - assertTrue("Scanned too many keys! Only expected " + kvs.length + - " total but already scanned " + (cells.size() + idx), + assertTrue("Scanned too many keys! Only expected " + kvs.length + + " total but already scanned " + (cells.size() + idx), kvs.length >= idx + cells.size()); for (CellModel cell: cells) { - assertTrue("Row mismatch", - Bytes.equals(rowModel.getKey(), kvs[idx].getRow())); + assertTrue("Row mismatch", + Bytes.equals(rowModel.getKey(), CellUtil.cloneRow(kvs[idx]))); byte[][] split = KeyValue.parseColumn(cell.getColumn()); - assertTrue("Family mismatch", - Bytes.equals(split[0], kvs[idx].getFamily())); - assertTrue("Qualifier mismatch", - Bytes.equals(split[1], kvs[idx].getQualifier())); - assertTrue("Value mismatch", - Bytes.equals(cell.getValue(), kvs[idx].getValue())); + assertTrue("Family mismatch", + Bytes.equals(split[0], CellUtil.cloneFamily(kvs[idx]))); + assertTrue("Qualifier mismatch", + Bytes.equals(split[1], CellUtil.cloneQualifier(kvs[idx]))); + assertTrue("Value mismatch", + Bytes.equals(cell.getValue(), CellUtil.cloneValue(kvs[idx]))); idx++; } } @@ -309,7 +316,7 @@ public class TestScannersWithFilters { marshaller.marshal(model, writer); LOG.debug(writer.toString()); byte[] body = Bytes.toBytes(writer.toString()); - Response response = client.put("/" + TABLE + "/scanner", + Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_XML, body); assertEquals(response.getCode(), 201); String scannerURI = response.getLocation(); @@ -334,7 +341,7 @@ public class TestScannersWithFilters { RowModel rowModel = i.next(); List cells = rowModel.getCells(); if (cells.isEmpty()) break; - assertTrue("Scanned too many rows! Only expected " + expectedRows + + assertTrue("Scanned too many rows! 
Only expected " + expectedRows + " total but already scanned " + (j+1), expectedRows > j); assertEquals("Expected " + expectedKeys + " keys per row but " + "returned " + cells.size(), expectedKeys, cells.size()); @@ -348,7 +355,7 @@ public class TestScannersWithFilters { // No filter long expectedRows = numRows; long expectedKeys = colsPerRow; - + // Both families Scan s = new Scan(); verifyScan(s, expectedRows, expectedKeys); @@ -416,7 +423,7 @@ public class TestScannersWithFilters { new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]), new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]) }; - + // Grab all 6 rows long expectedRows = 6; long expectedKeys = colsPerRow; @@ -425,7 +432,7 @@ public class TestScannersWithFilters { verifyScan(s, expectedRows, expectedKeys); s.setFilter(new PageFilter(expectedRows)); verifyScanFull(s, expectedKVs); - + // Grab first 4 rows (6 cols per row) expectedRows = 4; expectedKeys = colsPerRow; @@ -434,7 +441,7 @@ public class TestScannersWithFilters { verifyScan(s, expectedRows, expectedKeys); s.setFilter(new PageFilter(expectedRows)); verifyScanFull(s, Arrays.copyOf(expectedKVs, 24)); - + // Grab first 2 rows expectedRows = 2; expectedKeys = colsPerRow; @@ -451,20 +458,20 @@ public class TestScannersWithFilters { s.setFilter(new PageFilter(expectedRows)); verifyScan(s, expectedRows, expectedKeys); s.setFilter(new PageFilter(expectedRows)); - verifyScanFull(s, Arrays.copyOf(expectedKVs, 6)); + verifyScanFull(s, Arrays.copyOf(expectedKVs, 6)); } @Test public void testInclusiveStopFilter() throws Exception { // Grab rows from group one - + // If we just use start/stop row, we get total/2 - 1 rows long expectedRows = (numRows / 2) - 1; long expectedKeys = colsPerRow; - Scan s = new Scan(Bytes.toBytes("testRowOne-0"), + Scan s = new Scan(Bytes.toBytes("testRowOne-0"), Bytes.toBytes("testRowOne-3")); verifyScan(s, expectedRows, expectedKeys); - + // Now use start row with inclusive stop filter expectedRows = numRows / 2; s = new Scan(Bytes.toBytes("testRowOne-0")); @@ -472,14 +479,14 @@ public class TestScannersWithFilters { verifyScan(s, expectedRows, expectedKeys); // Grab rows from group two - + // If we just use start/stop row, we get total/2 - 1 rows expectedRows = (numRows / 2) - 1; expectedKeys = colsPerRow; - s = new Scan(Bytes.toBytes("testRowTwo-0"), + s = new Scan(Bytes.toBytes("testRowTwo-0"), Bytes.toBytes("testRowTwo-3")); verifyScan(s, expectedRows, expectedKeys); - + // Now use start row with inclusive stop filter expectedRows = numRows / 2; s = new Scan(Bytes.toBytes("testRowTwo-0")); @@ -497,7 +504,7 @@ public class TestScannersWithFilters { Scan s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match keys less than same qualifier // Expect only two keys (one from each family) in half the rows expectedRows = numRows / 2; @@ -507,7 +514,7 @@ public class TestScannersWithFilters { s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match keys less than or equal // Expect four keys (two from each family) in half the rows expectedRows = numRows / 2; @@ -517,7 +524,7 @@ public class TestScannersWithFilters { s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match keys not equal // Expect four keys (two from each family) // Only look in first group of rows @@ -528,7 +535,7 @@ public class TestScannersWithFilters { s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo")); 
s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match keys greater or equal // Expect four keys (two from each family) // Only look in first group of rows @@ -539,7 +546,7 @@ public class TestScannersWithFilters { s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo")); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match keys greater // Expect two keys (one from each family) // Only look in first group of rows @@ -550,7 +557,7 @@ public class TestScannersWithFilters { s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo")); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match keys not equal to // Look across rows and fully validate the keys and ordering // Expect varied numbers of keys, 4 per row in group one, 6 per row in @@ -559,7 +566,7 @@ public class TestScannersWithFilters { new BinaryComparator(QUALIFIERS_ONE[2])); s = new Scan(); s.setFilter(f); - + KeyValue [] kvs = { // testRowOne-0 new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]), @@ -599,7 +606,7 @@ public class TestScannersWithFilters { new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]), }; verifyScanFull(s, kvs); - + // Test across rows and groups with a regex // Filter out "test*-2" // Expect 4 keys per row across both groups @@ -607,7 +614,7 @@ public class TestScannersWithFilters { new RegexStringComparator("test.+-2")); s = new Scan(); s.setFilter(f); - + kvs = new KeyValue [] { // testRowOne-0 new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]), @@ -653,7 +660,7 @@ public class TestScannersWithFilters { Scan s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match a two rows, one from each group, using regex expectedRows = 2; expectedKeys = colsPerRow; @@ -662,7 +669,7 @@ public class TestScannersWithFilters { s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match rows less than // Expect all keys in one row expectedRows = 1; @@ -672,7 +679,7 @@ public class TestScannersWithFilters { s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match rows less than or equal // Expect all keys in two rows expectedRows = 2; @@ -682,7 +689,7 @@ public class TestScannersWithFilters { s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match rows not equal // Expect all keys in all but one row expectedRows = numRows - 1; @@ -692,7 +699,7 @@ public class TestScannersWithFilters { s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match keys greater or equal // Expect all keys in all but one row expectedRows = numRows - 1; @@ -702,7 +709,7 @@ public class TestScannersWithFilters { s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match keys greater // Expect all keys in all but two rows expectedRows = numRows - 2; @@ -712,7 +719,7 @@ public class TestScannersWithFilters { s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match rows not equal to testRowTwo-2 // Look across rows and fully validate the keys and ordering // Should see all keys in all rows but testRowTwo-2 @@ -720,7 +727,7 @@ public class TestScannersWithFilters { new BinaryComparator(Bytes.toBytes("testRowOne-2"))); s = new Scan(); s.setFilter(f); - + KeyValue [] kvs = { // testRowOne-0 new KeyValue(ROWS_ONE[0], FAMILIES[0], 
QUALIFIERS_ONE[0], VALUES[0]), @@ -759,7 +766,7 @@ public class TestScannersWithFilters { new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]), }; verifyScanFull(s, kvs); - + // Test across rows and groups with a regex // Filter out everything that doesn't match "*-2" // Expect all keys in two rows @@ -767,7 +774,7 @@ public class TestScannersWithFilters { new RegexStringComparator(".+-2")); s = new Scan(); s.setFilter(f); - + kvs = new KeyValue [] { // testRowOne-2 new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]), @@ -825,7 +832,7 @@ public class TestScannersWithFilters { s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match values less than or equal // Expect all rows expectedRows = numRows; @@ -845,7 +852,7 @@ public class TestScannersWithFilters { s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match values not equal // Expect half the rows expectedRows = numRows / 2; @@ -855,7 +862,7 @@ public class TestScannersWithFilters { s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match values greater or equal // Expect all rows expectedRows = numRows; @@ -865,7 +872,7 @@ public class TestScannersWithFilters { s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match values greater // Expect half rows expectedRows = numRows / 2; @@ -875,7 +882,7 @@ public class TestScannersWithFilters { s = new Scan(); s.setFilter(f); verifyScanNoEarlyOut(s, expectedRows, expectedKeys); - + // Match values not equal to testValueOne // Look across rows and fully validate the keys and ordering // Should see all keys in all group two rows @@ -883,7 +890,7 @@ public class TestScannersWithFilters { new BinaryComparator(Bytes.toBytes("testValueOne"))); s = new Scan(); s.setFilter(f); - + KeyValue [] kvs = { // testRowTwo-0 new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]), @@ -918,7 +925,7 @@ public class TestScannersWithFilters { new BinaryComparator(Bytes.toBytes("testQualifierOne-2")))); Scan s = new Scan(); s.setFilter(f); - + KeyValue [] kvs = { // testRowTwo-0 new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]), @@ -947,7 +954,7 @@ public class TestScannersWithFilters { @Test public void testFilterList() throws Exception { - // Test getting a single row, single key using Row, Qualifier, and Value + // Test getting a single row, single key using Row, Qualifier, and Value // regular expression and substring filters // Use must pass all List filters = new ArrayList(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java index eb6d3bef82f..3ada51a67d8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/TagRewriteCell.java @@ -106,12 +106,6 @@ public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimesta return cell.getTypeByte(); } - @Override - @Deprecated - public long getMvccVersion() { - return getSequenceId(); - } - @Override public long getSequenceId() { return cell.getSequenceId(); @@ -151,30 +145,6 @@ public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimesta return this.tags.length; } - @Override - @Deprecated - public byte[] getValue() { - return cell.getValue(); - } - - @Override - @Deprecated - public byte[] getFamily() { - return 
cell.getFamily(); - } - - @Override - @Deprecated - public byte[] getQualifier() { - return cell.getQualifier(); - } - - @Override - @Deprecated - public byte[] getRow() { - return cell.getRow(); - } - @Override public long heapSize() { long sum = CellUtil.estimatedHeapSizeOf(cell) - cell.getTagsLength(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java index c067fc3fa2e..9d9cee0e8a8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java @@ -17,6 +17,12 @@ */ package org.apache.hadoop.hbase.mapreduce; +import java.io.IOException; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.Map; +import java.util.TreeMap; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -48,12 +54,6 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import java.io.IOException; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Map; -import java.util.TreeMap; - /** * A tool to replay WAL files as a M/R job. * The WAL can be replayed for a set of tables or all tables, @@ -106,8 +106,8 @@ public class WALPlayer extends Configured implements Tool { if (Bytes.equals(table, key.getTablename().getName())) { for (Cell cell : value.getCells()) { KeyValue kv = KeyValueUtil.ensureKeyValue(cell); - if (WALEdit.isMetaEditFamily(kv.getFamily())) continue; - context.write(new ImmutableBytesWritable(kv.getRow()), kv); + if (WALEdit.isMetaEditFamily(kv)) continue; + context.write(new ImmutableBytesWritable(CellUtil.cloneRow(kv)), kv); } } } catch (InterruptedException e) { @@ -149,7 +149,7 @@ public class WALPlayer extends Configured implements Tool { Cell lastCell = null; for (Cell cell : value.getCells()) { // filtering WAL meta entries - if (WALEdit.isMetaEditFamily(cell.getFamily())) continue; + if (WALEdit.isMetaEditFamily(cell)) continue; // Allow a subclass to filter out this cell.
if (filter(context, cell)) { @@ -163,9 +163,9 @@ public class WALPlayer extends Configured implements Tool { if (put != null) context.write(tableOut, put); if (del != null) context.write(tableOut, del); if (CellUtil.isDelete(cell)) { - del = new Delete(cell.getRow()); + del = new Delete(CellUtil.cloneRow(cell)); } else { - put = new Put(cell.getRow()); + put = new Put(CellUtil.cloneRow(cell)); } } if (CellUtil.isDelete(cell)) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java index d6dfb76dc45..02a73f8c91a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java @@ -329,7 +329,7 @@ public class ScanQueryMatcher { * they affect */ byte typeByte = cell.getTypeByte(); - long mvccVersion = cell.getMvccVersion(); + long mvccVersion = cell.getSequenceId(); if (CellUtil.isDelete(cell)) { if (keepDeletedCells == KeepDeletedCells.FALSE || (keepDeletedCells == KeepDeletedCells.TTL && timestamp < ttl)) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java index 45610fa8a7e..bb49aba56c5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java @@ -34,6 +34,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.KeyOnlyKeyValue; @@ -168,7 +169,7 @@ public class StripeStoreFileManager // Order matters for this call. 
result.addSublist(state.level0Files); if (!state.stripeFiles.isEmpty()) { - int lastStripeIndex = findStripeForRow(targetKey.getRow(), false); + int lastStripeIndex = findStripeForRow(CellUtil.cloneRow(targetKey), false); for (int stripeIndex = lastStripeIndex; stripeIndex >= 0; --stripeIndex) { result.addSublist(state.stripeFiles.get(stripeIndex)); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java index f6619e8e0f5..a752ff19517 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java @@ -288,7 +288,7 @@ public class WALEdit implements Writable, HeapSize { public static FlushDescriptor getFlushDescriptor(Cell cell) throws IOException { if (CellUtil.matchingColumn(cell, METAFAMILY, FLUSH)) { - return FlushDescriptor.parseFrom(cell.getValue()); + return FlushDescriptor.parseFrom(CellUtil.cloneValue(cell)); } return null; } @@ -302,7 +302,7 @@ public class WALEdit implements Writable, HeapSize { public static RegionEventDescriptor getRegionEventDescriptor(Cell cell) throws IOException { if (CellUtil.matchingColumn(cell, METAFAMILY, REGION_EVENT)) { - return RegionEventDescriptor.parseFrom(cell.getValue()); + return RegionEventDescriptor.parseFrom(CellUtil.cloneValue(cell)); } return null; } @@ -336,7 +336,7 @@ public class WALEdit implements Writable, HeapSize { */ public static CompactionDescriptor getCompaction(Cell kv) throws IOException { if (CellUtil.matchingColumn(kv, METAFAMILY, COMPACTION)) { - return CompactionDescriptor.parseFrom(kv.getValue()); + return CompactionDescriptor.parseFrom(CellUtil.cloneValue(kv)); } return null; } @@ -365,7 +365,7 @@ public class WALEdit implements Writable, HeapSize { */ public static WALProtos.BulkLoadDescriptor getBulkLoadDescriptor(Cell cell) throws IOException { if (CellUtil.matchingColumn(cell, METAFAMILY, BULK_LOAD)) { - return WALProtos.BulkLoadDescriptor.parseFrom(cell.getValue()); + return WALProtos.BulkLoadDescriptor.parseFrom(CellUtil.cloneValue(cell)); } return null; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java index 166dc37210b..3501f3e70ba 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java @@ -23,6 +23,7 @@ import java.util.NavigableMap; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.wal.WAL.Entry; @@ -44,8 +45,8 @@ public class ScopeWALEntryFilter implements WALEntryFilter { Cell cell = cells.get(i); // The scope will be null or empty if // there's nothing to replicate in that WALEdit - if (!scopes.containsKey(cell.getFamily()) - || scopes.get(cell.getFamily()) == HConstants.REPLICATION_SCOPE_LOCAL) { + byte[] fam = CellUtil.cloneFamily(cell); + if (!scopes.containsKey(fam) || scopes.get(fam) == HConstants.REPLICATION_SCOPE_LOCAL) { cells.remove(i); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java index b8925125423..0cbbcef0496 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java @@ -25,9 +25,10 @@ import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.wal.WAL.Entry; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.wal.WAL.Entry; public class TableCfWALEntryFilter implements WALEntryFilter { @@ -62,7 +63,7 @@ public class TableCfWALEntryFilter implements WALEntryFilter { Cell cell = cells.get(i); // ignore(remove) kv if its cf isn't in the replicable cf list // (empty cfs means all cfs of this table are replicable) - if ((cfs != null && !cfs.contains(Bytes.toString(cell.getFamily())))) { + if ((cfs != null && !cfs.contains(Bytes.toString(CellUtil.cloneFamily(cell))))) { cells.remove(i); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java index c75f81f1b77..b3db0f616dd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.HConstants; @@ -354,7 +355,7 @@ public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint { } sinkWriter.append(buffer.getTableName(), buffer.getEncodedRegionName(), - entries.get(0).getEdit().getCells().get(0).getRow(), entries); + CellUtil.cloneRow(entries.get(0).getEdit().getCells().get(0)), entries); } @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java index 5b0f469a04a..b396dfcc270 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java @@ -244,7 +244,7 @@ public class Replication extends WALActionsListener.Base implements new TreeMap(Bytes.BYTES_COMPARATOR); byte[] family; for (Cell cell : logEdit.getCells()) { - family = cell.getFamily(); + family = CellUtil.cloneFamily(cell); // This is expected and the KV should not be replicated if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) continue; // Unexpected, has a tendency to happen in unit tests diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index 7e9299a9399..8bd69a29f47 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -329,9 +329,9 @@ public class AccessController extends BaseMasterAndRegionObserver List kvList = (List)family.getValue(); for (KeyValue kv : kvList) { if (!authManager.authorize(user, tableName, family.getKey(), - kv.getQualifier(), permRequest)) { - return AuthResult.deny(request, "Failed qualifier check", user, - permRequest, tableName, makeFamilyMap(family.getKey(), kv.getQualifier())); + CellUtil.cloneQualifier(kv), permRequest)) { - return AuthResult.deny(request, "Failed qualifier check", user, permRequest, + tableName, makeFamilyMap(family.getKey(), CellUtil.cloneQualifier(kv))); } } } @@ -749,7 +749,7 @@ public class AccessController extends BaseMasterAndRegionObserver } } } else if (entry.getValue() == null) { - get.addFamily(col); + get.addFamily(col); } else { throw new RuntimeException("Unhandled collection type " + entry.getValue().getClass().getName()); @@ -1308,7 +1308,7 @@ public class AccessController extends BaseMasterAndRegionObserver @Override public void preModifyNamespace(ObserverContext ctx, NamespaceDescriptor ns) throws IOException { - // We require only global permission so that + // We require only global permission so that // a user with NS admin cannot alter namespace configurations, i.e. namespace quota requireGlobalPermission("modifyNamespace", Action.ADMIN, ns.getName()); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index 0e20903129f..0c3ff8390d5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -393,7 +393,7 @@ public class HBaseFsck extends Configured implements Closeable { LOG.info("Failed to create lock file " + hbckLockFilePath.getName() + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of " + retryCounter.getMaxAttempts()); - LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(), + LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(), ioe); try { exception = ioe; @@ -880,7 +880,7 @@ public class HBaseFsck extends Configured implements Closeable { hf = HFile.createReader(fs, hfile.getPath(), cacheConf, getConf()); hf.loadFileInfo(); Cell startKv = hf.getFirstKey(); - start = startKv.getRow(); + start = CellUtil.cloneRow(startKv); Cell endKv = hf.getLastKey(); end = CellUtil.cloneRow(endKv); } catch (IOException ioe) { @@ -2685,10 +2685,10 @@ public class HBaseFsck extends Configured implements Closeable { } regionsFromMeta = Ordering.natural().immutableSortedCopy(regions); } - + return regionsFromMeta; } - + private class IntegrityFixSuggester extends TableIntegrityErrorHandlerImpl { ErrorReporter errors; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java index e579164d501..4c55cb336c4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALPrettyPrinter.java @@ -33,8 +33,6 @@ import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.PosixParser; -import
org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -43,14 +41,14 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.Tag; +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.classification.InterfaceStability; +import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader; +import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.codehaus.jackson.map.ObjectMapper; -import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader; -// imports for things that haven't moved yet. -import org.apache.hadoop.hbase.regionserver.wal.WALEdit; - /** * WALPrettyPrinter prints the contents of a given WAL with a variety of * options affecting formatting and extent of content. @@ -245,7 +243,7 @@ public class WALPrettyPrinter { } WAL.Reader log = WALFactory.createReader(fs, p, conf); - + if (log instanceof ProtobufLogReader) { List writerClsNames = ((ProtobufLogReader) log).getWriterClsNames(); if (writerClsNames != null && writerClsNames.size() > 0) { @@ -258,18 +256,18 @@ public class WALPrettyPrinter { } out.println(); } - + String cellCodecClsName = ((ProtobufLogReader) log).getCodecClsName(); if (cellCodecClsName != null) { out.println("Cell Codec Class: " + cellCodecClsName); } } - + if (outputJSON && !persistentOutput) { out.print("["); firstTxn = true; } - + try { WAL.Entry entry; while ((entry = log.next()) != null) { @@ -288,7 +286,7 @@ public class WALPrettyPrinter { for (Cell cell : edit.getCells()) { // add atomic operation to txn Map op = new HashMap(toStringMap(cell)); - if (outputValues) op.put("value", Bytes.toStringBinary(cell.getValue())); + if (outputValues) op.put("value", Bytes.toStringBinary(CellUtil.cloneValue(cell))); // check row output filter if (row == null || ((String) op.get("row")).equals(row)) { actions.add(op); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java index 1b3fcf2844a..0dd8beacb96 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java @@ -3744,11 +3744,11 @@ public class TestFromClientSide { // KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO KeyValue kv = (KeyValue)put.getFamilyCellMap().get(CONTENTS_FAMILY).get(0); - assertTrue(Bytes.equals(kv.getFamily(), CONTENTS_FAMILY)); + assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), CONTENTS_FAMILY)); // will it return null or an empty byte array? 
- assertTrue(Bytes.equals(kv.getQualifier(), new byte[0])); + assertTrue(Bytes.equals(CellUtil.cloneQualifier(kv), new byte[0])); - assertTrue(Bytes.equals(kv.getValue(), value)); + assertTrue(Bytes.equals(CellUtil.cloneValue(kv), value)); table.put(put); @@ -5335,7 +5335,7 @@ public class TestFromClientSide { assertEquals(1, regionsList.size()); } } - + private List getRegionsInRange(TableName tableName, byte[] startKey, byte[] endKey) throws IOException { List regionsInRange = new ArrayList(); @@ -5778,8 +5778,11 @@ public class TestFromClientSide { int expectedIndex = 5; for (Result result : scanner) { assertEquals(result.size(), 1); - assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[expectedIndex])); - assertTrue(Bytes.equals(result.rawCells()[0].getQualifier(), QUALIFIERS[expectedIndex])); + Cell c = result.rawCells()[0]; + assertTrue(Bytes.equals(c.getRowArray(), c.getRowOffset(), c.getRowLength(), + ROWS[expectedIndex], 0, ROWS[expectedIndex].length)); + assertTrue(Bytes.equals(c.getQualifierArray(), c.getQualifierOffset(), + c.getQualifierLength(), QUALIFIERS[expectedIndex], 0, QUALIFIERS[expectedIndex].length)); expectedIndex--; } assertEquals(expectedIndex, 0); @@ -5817,7 +5820,7 @@ public class TestFromClientSide { for (Result result : ht.getScanner(scan)) { assertEquals(result.size(), 1); assertEquals(result.rawCells()[0].getValueLength(), Bytes.SIZEOF_INT); - assertEquals(Bytes.toInt(result.rawCells()[0].getValue()), VALUE.length); + assertEquals(Bytes.toInt(CellUtil.cloneValue(result.rawCells()[0])), VALUE.length); count++; } assertEquals(count, 10); @@ -6099,15 +6102,15 @@ public class TestFromClientSide { result = scanner.next(); assertTrue("Expected 2 keys but received " + result.size(), result.size() == 2); - assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[4])); - assertTrue(Bytes.equals(result.rawCells()[1].getRow(), ROWS[4])); - assertTrue(Bytes.equals(result.rawCells()[0].getValue(), VALUES[1])); - assertTrue(Bytes.equals(result.rawCells()[1].getValue(), VALUES[2])); + assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[4])); + assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[1]), ROWS[4])); + assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[0]), VALUES[1])); + assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[1]), VALUES[2])); result = scanner.next(); assertTrue("Expected 1 key but received " + result.size(), result.size() == 1); - assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[3])); - assertTrue(Bytes.equals(result.rawCells()[0].getValue(), VALUES[0])); + assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[3])); + assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[0]), VALUES[0])); scanner.close(); ht.close(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java index c46056d8536..9be2f6423e8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java @@ -27,6 +27,7 @@ import java.util.ConcurrentModificationException; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.ClientTests; import 
org.apache.hadoop.hbase.testclassification.SmallTests; @@ -106,8 +107,8 @@ public class TestPutDeleteEtcCellIteration { Cell cell = cellScanner.current(); byte [] bytes = Bytes.toBytes(index++); KeyValue kv = (KeyValue)cell; - assertTrue(Bytes.equals(kv.getFamily(), bytes)); - assertTrue(Bytes.equals(kv.getValue(), bytes)); + assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), bytes)); + assertTrue(Bytes.equals(CellUtil.cloneValue(kv), bytes)); } assertEquals(COUNT, index); } @@ -125,8 +126,8 @@ public class TestPutDeleteEtcCellIteration { int value = index; byte [] bytes = Bytes.toBytes(index++); KeyValue kv = (KeyValue)cell; - assertTrue(Bytes.equals(kv.getFamily(), bytes)); - long a = Bytes.toLong(kv.getValue()); + assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), bytes)); + long a = Bytes.toLong(CellUtil.cloneValue(kv)); assertEquals(value, a); } assertEquals(COUNT, index); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java index dd8c7b4c752..6e1a03833d6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java @@ -20,19 +20,19 @@ package org.apache.hadoop.hbase.coprocessor; import java.io.IOException; -import java.util.List; import java.util.Arrays; +import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.regionserver.wal.HLogKey; -import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.wal.WALKey; /** * Class for testing WALObserver coprocessor. @@ -119,8 +119,8 @@ implements WALObserver { Cell deletedCell = null; for (Cell cell : cells) { // assume only one kv from the WALEdit matches. 
- byte[] family = cell.getFamily(); - byte[] qulifier = cell.getQualifier(); + byte[] family = CellUtil.cloneFamily(cell); + byte[] qulifier = CellUtil.cloneQualifier(cell); if (Arrays.equals(family, ignoredFamily) && Arrays.equals(qulifier, ignoredQualifier)) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java index 601db766024..6707354976c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java @@ -66,10 +66,10 @@ import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.regionserver.StoreFile; import org.apache.hadoop.hbase.regionserver.StoreFile.Reader; import org.apache.hadoop.hbase.regionserver.wal.HLogKey; -import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.hbase.wal.WALKey; import com.google.common.collect.ImmutableList; @@ -221,13 +221,13 @@ public class SimpleRegionObserver extends BaseRegionObserver { List metaEntries) throws IOException { ctPreSplitBeforePONR.incrementAndGet(); } - + @Override public void preSplitAfterPONR( ObserverContext ctx) throws IOException { ctPreSplitAfterPONR.incrementAndGet(); } - + @Override public void postSplit(ObserverContext c, Region l, Region r) { ctPostSplit.incrementAndGet(); @@ -370,7 +370,7 @@ public class SimpleRegionObserver extends BaseRegionObserver { } @Override - public void prePut(final ObserverContext c, + public void prePut(final ObserverContext c, final Put put, final WALEdit edit, final Durability durability) throws IOException { Map> familyMap = put.getFamilyCellMap(); @@ -384,20 +384,23 @@ public class SimpleRegionObserver extends BaseRegionObserver { assertNotNull(cells); assertNotNull(cells.get(0)); KeyValue kv = (KeyValue)cells.get(0); - assertTrue(Bytes.equals(kv.getQualifier(), - TestRegionObserverInterface.A)); + assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(), + kv.getQualifierLength(), TestRegionObserverInterface.A, 0, + TestRegionObserverInterface.A.length)); cells = familyMap.get(TestRegionObserverInterface.B); assertNotNull(cells); assertNotNull(cells.get(0)); kv = (KeyValue)cells.get(0); - assertTrue(Bytes.equals(kv.getQualifier(), - TestRegionObserverInterface.B)); + assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(), + kv.getQualifierLength(), TestRegionObserverInterface.B, 0, + TestRegionObserverInterface.B.length)); cells = familyMap.get(TestRegionObserverInterface.C); assertNotNull(cells); assertNotNull(cells.get(0)); kv = (KeyValue)cells.get(0); - assertTrue(Bytes.equals(kv.getQualifier(), - TestRegionObserverInterface.C)); + assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(), + kv.getQualifierLength(), TestRegionObserverInterface.C, 0, + TestRegionObserverInterface.C.length)); } ctPrePut.incrementAndGet(); } @@ -418,25 +421,31 @@ public class SimpleRegionObserver extends BaseRegionObserver { assertNotNull(cells.get(0)); // KeyValue v1 expectation. Cast for now until we go all Cell all the time. 
TODO KeyValue kv = (KeyValue)cells.get(0); - assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.A)); + assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(), + kv.getQualifierLength(), TestRegionObserverInterface.A, 0, + TestRegionObserverInterface.A.length)); cells = familyMap.get(TestRegionObserverInterface.B); assertNotNull(cells); assertNotNull(cells.get(0)); // KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO kv = (KeyValue)cells.get(0); - assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.B)); + assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(), + kv.getQualifierLength(), TestRegionObserverInterface.B, 0, + TestRegionObserverInterface.B.length)); cells = familyMap.get(TestRegionObserverInterface.C); assertNotNull(cells); assertNotNull(cells.get(0)); // KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO kv = (KeyValue)cells.get(0); - assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.C)); + assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(), + kv.getQualifierLength(), TestRegionObserverInterface.C, 0, + TestRegionObserverInterface.C.length)); } ctPostPut.incrementAndGet(); } @Override - public void preDelete(final ObserverContext c, + public void preDelete(final ObserverContext c, final Delete delete, final WALEdit edit, final Durability durability) throws IOException { Map> familyMap = delete.getFamilyCellMap(); @@ -456,7 +465,7 @@ public class SimpleRegionObserver extends BaseRegionObserver { } @Override - public void postDelete(final ObserverContext c, + public void postDelete(final ObserverContext c, final Delete delete, final WALEdit edit, final Durability durability) throws IOException { Map> familyMap = delete.getFamilyCellMap(); @@ -467,7 +476,7 @@ public class SimpleRegionObserver extends BaseRegionObserver { ctBeforeDelete.set(0); ctPostDeleted.incrementAndGet(); } - + @Override public void preBatchMutate(ObserverContext c, MiniBatchOperationInProgress miniBatchOp) throws IOException { @@ -604,7 +613,7 @@ public class SimpleRegionObserver extends BaseRegionObserver { } @Override - public Result preAppendAfterRowLock(ObserverContext e, + public Result preAppendAfterRowLock(ObserverContext e, Append append) throws IOException { ctPreAppendAfterRowLock.incrementAndGet(); return null; @@ -724,7 +733,7 @@ public class SimpleRegionObserver extends BaseRegionObserver { public boolean hadPostPut() { return ctPostPut.get() > 0; } - + public boolean hadPreBatchMutate() { return ctPreBatchMutate.get() > 0; } @@ -784,7 +793,7 @@ public class SimpleRegionObserver extends BaseRegionObserver { public boolean hadPreIncrement() { return ctPreIncrement.get() > 0; } - + public boolean hadPreIncrementAfterRowLock() { return ctPreIncrementAfterRowLock.get() > 0; } @@ -808,7 +817,7 @@ public class SimpleRegionObserver extends BaseRegionObserver { public boolean hadPrePreparedDeleteTS() { return ctPrePrepareDeleteTS.get() > 0; } - + public boolean hadPreWALRestore() { return ctPreWALRestore.get() > 0; } @@ -874,7 +883,7 @@ public class SimpleRegionObserver extends BaseRegionObserver { public int getCtPreSplit() { return ctPreSplit.get(); } - + public int getCtPreSplitBeforePONR() { return ctPreSplitBeforePONR.get(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java index 
cdcdeed391a..a3c106dc9a4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java @@ -37,6 +37,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -51,11 +52,6 @@ import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.wal.HLogKey; import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; -import org.apache.hadoop.hbase.wal.DefaultWALProvider; -import org.apache.hadoop.hbase.wal.WAL; -import org.apache.hadoop.hbase.wal.WALFactory; -import org.apache.hadoop.hbase.wal.WALKey; -import org.apache.hadoop.hbase.wal.WALSplitter; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.testclassification.CoprocessorTests; import org.apache.hadoop.hbase.testclassification.MediumTests; @@ -63,14 +59,19 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdge; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSUtils; +import org.apache.hadoop.hbase.wal.DefaultWALProvider; +import org.apache.hadoop.hbase.wal.WAL; +import org.apache.hadoop.hbase.wal.WALFactory; +import org.apache.hadoop.hbase.wal.WALKey; +import org.apache.hadoop.hbase.wal.WALSplitter; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; -import org.junit.rules.TestName; import org.junit.experimental.categories.Category; +import org.junit.rules.TestName; /** * Tests invocation of the @@ -216,14 +217,14 @@ public class TestWALObserver { List cells = edit.getCells(); for (Cell cell : cells) { - if (Arrays.equals(cell.getFamily(), TEST_FAMILY[0])) { + if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[0])) { foundFamily0 = true; } - if (Arrays.equals(cell.getFamily(), TEST_FAMILY[2])) { + if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[2])) { foundFamily2 = true; } - if (Arrays.equals(cell.getFamily(), TEST_FAMILY[1])) { - if (!Arrays.equals(cell.getValue(), TEST_VALUE[1])) { + if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[1])) { + if (!Arrays.equals(CellUtil.cloneValue(cell), TEST_VALUE[1])) { modifiedFamily1 = true; } } @@ -244,14 +245,14 @@ public class TestWALObserver { foundFamily2 = false; modifiedFamily1 = false; for (Cell cell : cells) { - if (Arrays.equals(cell.getFamily(), TEST_FAMILY[0])) { + if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[0])) { foundFamily0 = true; } - if (Arrays.equals(cell.getFamily(), TEST_FAMILY[2])) { + if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[2])) { foundFamily2 = true; } - if (Arrays.equals(cell.getFamily(), TEST_FAMILY[1])) { - if (!Arrays.equals(cell.getValue(), TEST_VALUE[1])) { + if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[1])) { + if (!Arrays.equals(CellUtil.cloneValue(cell), TEST_VALUE[1])) { modifiedFamily1 = true; } } @@ -462,7 +463,7 @@ public class TestWALObserver { /* * Creates an HRI around an HTD that has tableName and three * column families named. 
- * + * * @param tableName Name of table to use when we create HTableDescriptor. */ private HRegionInfo createBasic3FamilyHRegionInfo(final String tableName) { @@ -496,7 +497,7 @@ public class TestWALObserver { /** * Copied from HRegion. - * + * * @param familyMap * map of family->edits * @param walEdit diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java index 3601b0115d2..92be81a9f31 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java @@ -534,7 +534,7 @@ public class TestFilter { ArrayList values = new ArrayList(); boolean isMoreResults = scanner.next(values); if (!isMoreResults - || !Bytes.toString(values.get(0).getRow()).startsWith(prefix)) { + || !Bytes.toString(CellUtil.cloneRow(values.get(0))).startsWith(prefix)) { Assert.assertTrue( "The WhileMatchFilter should now filter all remaining", filter.filterAllRemaining()); @@ -581,7 +581,7 @@ public class TestFilter { /** - * The following filter simulates a pre-0.96 filter where filterRow() is defined while + * The following filter simulates a pre-0.96 filter where filterRow() is defined while * hasFilterRow() returns false */ static class OldTestFilter extends FilterBase { @@ -592,25 +592,25 @@ public class TestFilter { public boolean hasFilterRow() { return false; } - + @Override public boolean filterRow() { // always filter out rows return true; } - + @Override public ReturnCode filterKeyValue(Cell ignored) throws IOException { return ReturnCode.INCLUDE; } } - + /** - * The following test is to ensure old(such as hbase0.94) filterRow() can be correctly fired in - * 0.96+ code base. - * + * The following test is to ensure old(such as hbase0.94) filterRow() can be correctly fired in + * 0.96+ code base. + * * See HBASE-10366 - * + * * @throws Exception */ @Test @@ -1558,7 +1558,7 @@ public class TestFilter { }; for(KeyValue kv : srcKVs) { - Put put = new Put(kv.getRow()).add(kv); + Put put = new Put(CellUtil.cloneRow(kv)).add(kv); put.setDurability(Durability.SKIP_WAL); this.region.put(put); } @@ -1597,7 +1597,7 @@ public class TestFilter { // Add QUALIFIERS_ONE[1] to ROWS_THREE[0] with VALUES[0] KeyValue kvA = new KeyValue(ROWS_THREE[0], FAMILIES[0], QUALIFIERS_ONE[1], VALUES[0]); - this.region.put(new Put(kvA.getRow()).add(kvA)); + this.region.put(new Put(CellUtil.cloneRow(kvA)).add(kvA)); // Match VALUES[1] against QUALIFIERS_ONE[1] with filterIfMissing = true // Expect 1 row (3) @@ -1971,7 +1971,7 @@ public class TestFilter { verifyScanFullNoValues(s, expectedKVs, useLen); } } - + /** * Filter which makes sleeps for a second between each row of a scan. * This can be useful for manual testing of bugs like HBASE-5973. 
For example: @@ -1984,7 +1984,7 @@ public class TestFilter { */ public static class SlowScanFilter extends FilterBase { private static Thread ipcHandlerThread = null; - + @Override public byte [] toByteArray() {return null;} @@ -2099,5 +2099,5 @@ public class TestFilter { WAL wal = ((HRegion)testRegion).getWAL(); ((HRegion)testRegion).close(); wal.close(); - } + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java index 8854efeb2b3..440c9f56c65 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java @@ -266,7 +266,7 @@ public class TestFilterList { byte[] r1 = Bytes.toBytes("Row1"); byte[] r11 = Bytes.toBytes("Row11"); byte[] r2 = Bytes.toBytes("Row2"); - + FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ONE); flist.addFilter(new PrefixFilter(r1)); flist.filterRowKey(KeyValueUtil.createFirstOnRow(r1)); @@ -276,7 +276,7 @@ public class TestFilterList { flist.reset(); flist.filterRowKey(KeyValueUtil.createFirstOnRow(r2)); assertEquals(flist.filterKeyValue(new KeyValue(r2,r2,r2)), ReturnCode.SKIP); - + flist = new FilterList(FilterList.Operator.MUST_PASS_ONE); flist.addFilter(new AlwaysNextColFilter()); flist.addFilter(new PrefixFilter(r1)); @@ -298,7 +298,7 @@ public class TestFilterList { byte[] r1 = Bytes.toBytes("Row1"); byte[] r11 = Bytes.toBytes("Row11"); byte[] r2 = Bytes.toBytes("Row2"); - + FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ONE); flist.addFilter(new AlwaysNextColFilter()); flist.addFilter(new InclusiveStopFilter(r1)); @@ -390,7 +390,7 @@ public class TestFilterList { Arrays.asList(new Filter[] { includeFilter, alternateIncludeFilter, alternateFilter })); // INCLUDE, INCLUDE, INCLUDE_AND_NEXT_COL. assertEquals(Filter.ReturnCode.INCLUDE_AND_NEXT_COL, mpOnefilterList.filterKeyValue(null)); - // INCLUDE, SKIP, INCLUDE. + // INCLUDE, SKIP, INCLUDE. assertEquals(Filter.ReturnCode.INCLUDE, mpOnefilterList.filterKeyValue(null)); // Check must pass all filter. @@ -398,7 +398,7 @@ public class TestFilterList { Arrays.asList(new Filter[] { includeFilter, alternateIncludeFilter, alternateFilter })); // INCLUDE, INCLUDE, INCLUDE_AND_NEXT_COL. assertEquals(Filter.ReturnCode.INCLUDE_AND_NEXT_COL, mpAllfilterList.filterKeyValue(null)); - // INCLUDE, SKIP, INCLUDE. + // INCLUDE, SKIP, INCLUDE. 
assertEquals(Filter.ReturnCode.SKIP, mpAllfilterList.filterKeyValue(null)); } @@ -417,7 +417,7 @@ public class TestFilterList { public byte [] toByteArray() { return null; } - + @Override public ReturnCode filterKeyValue(Cell ignored) throws IOException { return ReturnCode.INCLUDE; @@ -541,12 +541,13 @@ public class TestFilterList { // Value for fam:qual1 should be stripped: assertEquals(Filter.ReturnCode.INCLUDE, flist.filterKeyValue(kvQual1)); final KeyValue transformedQual1 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual1)); - assertEquals(0, transformedQual1.getValue().length); + assertEquals(0, transformedQual1.getValueLength()); // Value for fam:qual2 should not be stripped: assertEquals(Filter.ReturnCode.INCLUDE, flist.filterKeyValue(kvQual2)); final KeyValue transformedQual2 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual2)); - assertEquals("value", Bytes.toString(transformedQual2.getValue())); + assertEquals("value", Bytes.toString(transformedQual2.getValueArray(), + transformedQual2.getValueOffset(), transformedQual2.getValueLength())); // Other keys should be skipped: assertEquals(Filter.ReturnCode.SKIP, flist.filterKeyValue(kvQual3)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java index 5d7fa3d4dc9..0e5f08e086a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java @@ -32,6 +32,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -102,7 +103,7 @@ public class TestHalfStoreFileReader { HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf); r.loadFileInfo(); Cell midKV = r.midkey(); - byte[] midkey = ((KeyValue.KeyOnlyKeyValue)midKV).getRow(); + byte[] midkey = CellUtil.cloneRow(midKV); //System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey)); @@ -167,7 +168,7 @@ public class TestHalfStoreFileReader { HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf); r.loadFileInfo(); Cell midKV = r.midkey(); - byte[] midkey = ((KeyValue.KeyOnlyKeyValue)midKV).getRow(); + byte[] midkey = CellUtil.cloneRow(midKV); Reference bottom = new Reference(midkey, Reference.Range.bottom); Reference top = new Reference(midkey, Reference.Range.top); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java index 2d478a43c46..91115c1dfc6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTreeEncoding.java @@ -37,6 +37,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.Tag; @@ -74,7 +75,7 @@ public class TestPrefixTreeEncoding { CellComparator.COMPARATOR); private 
static boolean formatRowNum = false; - + @Parameters public static Collection parameters() { List paramList = new ArrayList(); @@ -88,7 +89,7 @@ public class TestPrefixTreeEncoding { public TestPrefixTreeEncoding(boolean includesTag) { this.includesTag = includesTag; } - + @Before public void setUp() throws Exception { kvset.clear(); @@ -132,7 +133,8 @@ public class TestPrefixTreeEncoding { new KeyValue.KeyOnlyKeyValue(seekKey.getBuffer(), seekKey.getKeyOffset(), seekKey .getKeyLength()), true); assertNotNull(seeker.getKeyValue()); - assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1), seeker.getKeyValue().getRow()); + assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1), + CellUtil.cloneRow(seeker.getKeyValue())); // Seek before the last keyvalue; seekKey = KeyValueUtil.createFirstDeleteFamilyOnRow(Bytes.toBytes("zzzz"), CF_BYTES); @@ -140,7 +142,8 @@ public class TestPrefixTreeEncoding { new KeyValue.KeyOnlyKeyValue(seekKey.getBuffer(), seekKey.getKeyOffset(), seekKey .getKeyLength()), true); assertNotNull(seeker.getKeyValue()); - assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1), seeker.getKeyValue().getRow()); + assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1), + CellUtil.cloneRow(seeker.getKeyValue())); } @Test @@ -226,7 +229,7 @@ public class TestPrefixTreeEncoding { onDiskBytes.length - DataBlockEncoding.ID_SIZE); verifySeeking(seeker, readBuffer, batchId); } - + private void verifySeeking(EncodedSeeker encodeSeeker, ByteBuffer encodedData, int batchId) { List kvList = new ArrayList(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java index fa0cfecacf8..af8a6ccf5e2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java @@ -18,9 +18,6 @@ */ package org.apache.hadoop.hbase.io.hfile; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; @@ -242,7 +239,7 @@ public class TestHFile extends HBaseTestCase { /** * test none codecs - * @param useTags + * @param useTags */ void basicWithSomeCodec(String codec, boolean useTags) throws IOException { if (useTags) { @@ -311,12 +308,12 @@ public class TestHFile extends HBaseTestCase { writer.appendMetaBlock("HFileMeta" + i, new Writable() { private int val; public Writable setVal(int val) { this.val = val; return this; } - + @Override public void write(DataOutput out) throws IOException { out.write(("something to test" + val).getBytes()); } - + @Override public void readFields(DataInput in) throws IOException { } }.setVal(i)); @@ -330,7 +327,7 @@ public class TestHFile extends HBaseTestCase { private void readNumMetablocks(Reader reader, int n) throws IOException { for (int i = 0; i < n; i++) { ByteBuffer actual = reader.getMetaBlock("HFileMeta" + i, false); - ByteBuffer expected = + ByteBuffer expected = ByteBuffer.wrap(("something to test" + i).getBytes()); assertEquals("failed to match metadata", Bytes.toStringBinary(expected), Bytes.toStringBinary(actual)); @@ -377,7 +374,7 @@ public class TestHFile extends HBaseTestCase { @Test public void testNullMetaBlocks() throws Exception { if (cacheConf == null) cacheConf = new CacheConfig(conf); - for (Compression.Algorithm compressAlgo : + for (Compression.Algorithm compressAlgo : 
HBaseTestingUtility.COMPRESSION_ALGORITHMS) { Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile"); FSDataOutputStream fout = createFSOutput(mFile); @@ -512,8 +509,8 @@ public class TestHFile extends HBaseTestCase { newKey = HFileWriterImpl.getMidpoint(keyComparator, kv1, kv2); assertTrue(keyComparator.compare(kv1, newKey) < 0); assertTrue((keyComparator.compare(kv2, newKey)) > 0); - assertTrue(Arrays.equals(newKey.getFamily(), family)); - assertTrue(Arrays.equals(newKey.getQualifier(), qualB)); + assertTrue(Arrays.equals(CellUtil.cloneFamily(newKey), family)); + assertTrue(Arrays.equals(CellUtil.cloneQualifier(newKey), qualB)); assertTrue(newKey.getTimestamp() == HConstants.LATEST_TIMESTAMP); assertTrue(newKey.getTypeByte() == Type.Maximum.getCode()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java index 1bfd18cdc17..dfc5569a056 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java @@ -48,6 +48,7 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -131,7 +132,7 @@ public class TestHFileBlock { // generate it or repeat, it should compress well if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) { - row = keyValues.get(randomizer.nextInt(keyValues.size())).getRow(); + row = CellUtil.cloneRow(keyValues.get(randomizer.nextInt(keyValues.size()))); } else { row = new byte[FIELD_LENGTH]; randomizer.nextBytes(row); @@ -140,17 +141,16 @@ public class TestHFileBlock { family = new byte[FIELD_LENGTH]; randomizer.nextBytes(family); } else { - family = keyValues.get(0).getFamily(); + family = CellUtil.cloneFamily(keyValues.get(0)); } if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) { - qualifier = keyValues.get( - randomizer.nextInt(keyValues.size())).getQualifier(); + qualifier = CellUtil.cloneQualifier(keyValues.get(randomizer.nextInt(keyValues.size()))); } else { qualifier = new byte[FIELD_LENGTH]; randomizer.nextBytes(qualifier); } if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) { - value = keyValues.get(randomizer.nextInt(keyValues.size())).getValue(); + value = CellUtil.cloneValue(keyValues.get(randomizer.nextInt(keyValues.size()))); } else { value = new byte[FIELD_LENGTH]; randomizer.nextBytes(value); @@ -837,7 +837,7 @@ public class TestHFileBlock { .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM) .withChecksumType(ChecksumType.NULL).build(); HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, buf, - HFileBlock.FILL_HEADER, -1, + HFileBlock.FILL_HEADER, -1, 0, meta); long byteBufferExpectedSize = ClassSize.align(ClassSize.estimateBase(buf.getClass(), true) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java index fc44f3cf0cd..ebe35b3b542 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java @@ -480,7 
+480,7 @@ public class TestHFileBlockCompatibility { this.dataBlockEncoder.encode(kv, dataBlockEncodingCtx, this.userDataStream); this.unencodedDataSizeWritten += kv.getLength(); if (dataBlockEncodingCtx.getHFileContext().isIncludesMvcc()) { - this.unencodedDataSizeWritten += WritableUtils.getVIntSize(kv.getMvccVersion()); + this.unencodedDataSizeWritten += WritableUtils.getVIntSize(kv.getSequenceId()); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java index 883f60eaf75..253dff8a1ad 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java @@ -125,7 +125,7 @@ public class TestHFileWriterV2 { writer.appendMetaBlock("CAPITAL_OF_FRANCE", new Text("Paris")); writer.close(); - + FSDataInputStream fsdis = fs.open(hfilePath); @@ -144,7 +144,7 @@ public class TestHFileWriterV2 { .withIncludesTags(false) .withCompression(compressAlgo) .build(); - + HFileBlock.FSReader blockReader = new HFileBlock.FSReaderImpl(fsdis, fileSize, meta); // Comparator class name is stored in the trailer in version 2. CellComparator comparator = trailer.createComparator(); @@ -162,12 +162,12 @@ public class TestHFileWriterV2 { dataBlockIndexReader.readMultiLevelIndexRoot( blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX), trailer.getDataIndexCount()); - + if (findMidKey) { Cell midkey = dataBlockIndexReader.midkey(); assertNotNull("Midkey should not be null", midkey); } - + // Meta index. metaBlockIndexReader.readRootIndex( blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX) @@ -215,8 +215,10 @@ public class TestHFileWriterV2 { } // A brute-force check to see that all keys and values are correct. - assertTrue(Bytes.compareTo(key, keyValues.get(entriesRead).getKey()) == 0); - assertTrue(Bytes.compareTo(value, keyValues.get(entriesRead).getValue()) == 0); + KeyValue kv = keyValues.get(entriesRead); + assertTrue(Bytes.compareTo(key, kv.getKey()) == 0); + assertTrue(Bytes.compareTo(value, 0, value.length, kv.getValueArray(), kv.getValueOffset(), + kv.getValueLength()) == 0); ++entriesRead; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java index e9ba0893233..9adeaca2af3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV3.java @@ -44,7 +44,6 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; -import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; @@ -160,7 +159,7 @@ public class TestHFileWriterV3 { writer.appendMetaBlock("CAPITAL_OF_FRANCE", new Text("Paris")); writer.close(); - + FSDataInputStream fsdis = fs.open(hfilePath); @@ -192,12 +191,12 @@ public class TestHFileWriterV3 { // the root level. 
dataBlockIndexReader.readMultiLevelIndexRoot( blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX), trailer.getDataIndexCount()); - + if (findMidKey) { Cell midkey = dataBlockIndexReader.midkey(); assertNotNull("Midkey should not be null", midkey); } - + // Meta index. metaBlockIndexReader.readRootIndex( blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX) @@ -240,7 +239,7 @@ public class TestHFileWriterV3 { tagValue = new byte[tagLen]; buf.get(tagValue); } - + if (includeMemstoreTS) { ByteArrayInputStream byte_input = new ByteArrayInputStream(buf.array(), buf.arrayOffset() + buf.position(), buf.remaining()); @@ -251,11 +250,13 @@ public class TestHFileWriterV3 { } // A brute-force check to see that all keys and values are correct. - assertTrue(Bytes.compareTo(key, keyValues.get(entriesRead).getKey()) == 0); - assertTrue(Bytes.compareTo(value, keyValues.get(entriesRead).getValue()) == 0); + KeyValue kv = keyValues.get(entriesRead); + assertTrue(Bytes.compareTo(key, kv.getKey()) == 0); + assertTrue(Bytes.compareTo(value, 0, value.length, kv.getValueArray(), kv.getValueOffset(), + kv.getValueLength()) == 0); if (useTags) { assertNotNull(tagValue); - KeyValue tkv = keyValues.get(entriesRead); + KeyValue tkv = kv; assertEquals(tagValue.length, tkv.getTagsLength()); assertTrue(Bytes.compareTo(tagValue, 0, tagValue.length, tkv.getTagsArray(), tkv.getTagsOffset(), tkv.getTagsLength()) == 0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java index 1de21f3d6e3..bd5b09807d2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java @@ -70,11 +70,11 @@ public class TestSeekTo { return paramList; } static boolean switchKVs = false; - + public TestSeekTo(DataBlockEncoding encoding) { this.encoding = encoding; } - + @Before public void setUp() { //reset @@ -107,7 +107,8 @@ public class TestSeekTo { } } static String toRowStr(Cell kv) { - return Bytes.toString(KeyValueUtil.ensureKeyValue(kv).getRow()); + KeyValue c = KeyValueUtil.ensureKeyValue(kv); + return Bytes.toString(c.getRowArray(), c.getRowOffset(), c.getRowLength()); } Path makeNewFile(TagUsage tagUsage) throws IOException { @@ -338,7 +339,7 @@ public class TestSeekTo { Configuration conf = TEST_UTIL.getConfiguration(); HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf); reader.loadFileInfo(); - HFileBlockIndex.BlockIndexReader blockIndexReader = + HFileBlockIndex.BlockIndexReader blockIndexReader = reader.getDataBlockIndexReader(); System.out.println(blockIndexReader.toString()); // falls before the start of the file. 
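The hunks above and below all repeat one mechanical rewrite: the copying accessors removed from the Cell interface (getRow, getFamily, getQualifier, getValue) become CellUtil.cloneRow/cloneFamily/cloneQualifier/cloneValue calls, getMvccVersion() becomes getSequenceId(), and byte-only comparisons in tests switch to the offset/length overload of Bytes.equals so no copy is made at all. A minimal sketch of both replacement forms, assuming only that an org.apache.hadoop.hbase.Cell is in scope; the class and method names below are illustrative and not part of this patch:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;

// Illustrative helper only, not part of the patch.
public class CellAccessorMigrationSketch {
  static boolean rowMatches(Cell cell, byte[] expectedRow) {
    // Copying form: replaces the removed Cell#getRow() where a standalone
    // byte[] is genuinely required (e.g. new Put(...), parseFrom(...)).
    byte[] row = CellUtil.cloneRow(cell);
    // Copy-free form: compares against the cell's backing array directly,
    // which is what most of the test hunks switch to.
    boolean direct = Bytes.equals(cell.getRowArray(), cell.getRowOffset(),
        cell.getRowLength(), expectedRow, 0, expectedRow.length);
    return direct && Bytes.equals(row, expectedRow);
  }
}

The server-side hunks (WALPlayer, WALEdit, HBaseFsck) keep the copying form because Put, Delete, and the protobuf parseFrom calls need their own byte[]; the assertion-heavy test hunks mostly take the copy-free form.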
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java index b0b35fae103..90b92bb3eab 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java @@ -34,8 +34,6 @@ import java.util.Random; import java.util.Set; import java.util.concurrent.Callable; -import junit.framework.Assert; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -91,6 +89,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; +import junit.framework.Assert; + /** * Simple test for {@link KeyValueSortReducer} and {@link HFileOutputFormat}. * Sets up and runs a mapreduce job that writes hfile output. @@ -201,8 +201,11 @@ public class TestHFileOutputFormat { KeyValue original = kv.clone(); writer.write(new ImmutableBytesWritable(), kv); assertFalse(original.equals(kv)); - assertTrue(Bytes.equals(original.getRow(), kv.getRow())); - assertTrue(CellUtil.matchingColumn(original, kv.getFamily(), kv.getQualifier())); + assertTrue(Bytes.equals(original.getRowArray(), original.getRowOffset(), + original.getRowLength(), kv.getRowArray(), kv.getRowOffset(), kv.getRowLength())); + assertTrue(CellUtil.matchingColumn(original, kv.getFamilyArray(), kv.getFamilyOffset(), + kv.getFamilyLength(), kv.getQualifierArray(), kv.getQualifierOffset(), + kv.getQualifierLength())); assertNotSame(original.getTimestamp(), kv.getTimestamp()); assertNotSame(HConstants.LATEST_TIMESTAMP, kv.getTimestamp()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java index 1109ae2747b..a999624acbc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java @@ -344,8 +344,8 @@ public class TestImportExport { assertEquals(now, res[6].getTimestamp()); t.close(); } - - + + @Test public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Exception { TableName EXPORT_TABLE = @@ -376,8 +376,8 @@ public class TestImportExport { //Add second Delete family marker d = new Delete(ROW1, now+7); exportT.delete(d); - - + + String[] args = new String[] { "-D" + Export.RAW_SCAN + "=true", EXPORT_TABLE.getNameAsString(), FQ_OUTPUT_DIR, @@ -403,10 +403,10 @@ public class TestImportExport { Scan s = new Scan(); s.setMaxVersions(); s.setRaw(true); - + ResultScanner importedTScanner = importT.getScanner(s); Result importedTResult = importedTScanner.next(); - + ResultScanner exportedTScanner = exportT.getScanner(s); Result exportedTResult = exportedTScanner.next(); try @@ -504,7 +504,7 @@ public class TestImportExport { results.close(); return count; } - + /** * test main method. 
Import should print help and call System.exit */ @@ -586,7 +586,7 @@ public class TestImportExport { ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0]; KeyValue key = (KeyValue) invocation.getArguments()[1]; assertEquals("Key", Bytes.toString(writer.get())); - assertEquals("row", Bytes.toString(key.getRow())); + assertEquals("row", Bytes.toString(CellUtil.cloneRow(key))); return null; } }).when(ctx).write(any(ImmutableBytesWritable.class), any(KeyValue.class)); @@ -616,7 +616,7 @@ public class TestImportExport { args.add("param2"); Import.addFilterAndArguments(configuration, FilterBase.class, args); - assertEquals("org.apache.hadoop.hbase.filter.FilterBase", + assertEquals("org.apache.hadoop.hbase.filter.FilterBase", configuration.get(Import.FILTER_CLASS_CONF_KEY)); assertEquals("param1,param2", configuration.get(Import.FILTER_ARGS_CONF_KEY)); } @@ -700,5 +700,5 @@ public class TestImportExport { public boolean isWALVisited() { return isVisited; } - } + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java index 6dc1d9fef09..a12887e7735 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java @@ -33,11 +33,11 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.MiniHBaseCluster; +import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; @@ -45,13 +45,13 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.mapreduce.WALPlayer.WALKeyValueMapper; -import org.apache.hadoop.hbase.wal.WAL; -import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.LauncherSecurityManager; +import org.apache.hadoop.hbase.wal.WAL; +import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Mapper.Context; import org.apache.hadoop.util.ToolRunner; @@ -123,7 +123,7 @@ public class TestWALPlayer { new String[] {walInputDir, TABLENAME1.getNameAsString(), TABLENAME2.getNameAsString() })); - + // verify the WAL was player into table 2 Get g = new Get(ROW); Result r = t2.get(g); @@ -151,15 +151,13 @@ public class TestWALPlayer { WALKey key = mock(WALKey.class); when(key.getTablename()).thenReturn(TableName.valueOf("table")); @SuppressWarnings("unchecked") - Mapper.Context context = - mock(Context.class); + Mapper.Context context = mock(Context.class); when(context.getConfiguration()).thenReturn(configuration); WALEdit value = mock(WALEdit.class); ArrayList values = new ArrayList(); - KeyValue kv1 = mock(KeyValue.class); - when(kv1.getFamily()).thenReturn(Bytes.toBytes("family")); - 
when(kv1.getRow()).thenReturn(Bytes.toBytes("row")); + KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), null); + values.add(kv1); when(value.getCells()).thenReturn(values); mapper.setup(context); @@ -171,7 +169,7 @@ public class TestWALPlayer { ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0]; KeyValue key = (KeyValue) invocation.getArguments()[1]; assertEquals("row", Bytes.toString(writer.get())); - assertEquals("row", Bytes.toString(key.getRow())); + assertEquals("row", Bytes.toString(CellUtil.cloneRow(key))); return null; } }).when(context).write(any(ImmutableBytesWritable.class), any(KeyValue.class)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java index 013053efa92..1fcb3668b47 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALRecordReader.java @@ -100,7 +100,7 @@ public class TestWALRecordReader { fs = TEST_UTIL.getDFSCluster().getFileSystem(); hbaseDir = TEST_UTIL.createRootDir(); - + logDir = new Path(hbaseDir, HConstants.HREGION_LOGDIR_NAME); htd = new HTableDescriptor(tableName); @@ -152,7 +152,7 @@ public class TestWALRecordReader { walfactory.shutdown(); LOG.info("Closed WAL " + log.toString()); - + WALInputFormat input = new WALInputFormat(); Configuration jobConf = new Configuration(conf); jobConf.set("mapreduce.input.fileinputformat.inputdir", logDir.toString()); @@ -257,9 +257,14 @@ public class TestWALRecordReader { for (byte[] column : columns) { assertTrue(reader.nextKeyValue()); Cell cell = reader.getCurrentValue().getCells().get(0); - if (!Bytes.equals(column, cell.getQualifier())) { - assertTrue("expected [" + Bytes.toString(column) + "], actual [" - + Bytes.toString(cell.getQualifier()) + "]", false); + if (!Bytes.equals(column, 0, column.length, cell.getQualifierArray(), + cell.getQualifierOffset(), cell.getQualifierLength())) { + assertTrue( + "expected [" + + Bytes.toString(column) + + "], actual [" + + Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), + cell.getQualifierLength()) + "]", false); } } assertFalse(reader.nextKeyValue()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java index aa57e2230d4..4ce228f5475 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBulkLoad.java @@ -285,7 +285,7 @@ public class TestBulkLoad { @Override protected boolean matchesSafely(WALEdit item) { - assertTrue(Arrays.equals(item.getCells().get(0).getQualifier(), typeBytes)); + assertTrue(Arrays.equals(CellUtil.cloneQualifier(item.getCells().get(0)), typeBytes)); BulkLoadDescriptor desc; try { desc = WALEdit.getBulkLoadDescriptor(item.getCells().get(0)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java index c4e0a42ca6c..684839d4196 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCellSkipListSet.java @@ -25,6 +25,7 @@ import 
junit.framework.TestCase; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -49,15 +50,18 @@ public class TestCellSkipListSet extends TestCase { assertEquals(1, this.csls.size()); Cell first = this.csls.first(); assertTrue(kv.equals(first)); - assertTrue(Bytes.equals(kv.getValue(), first.getValue())); + assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), + first.getValueArray(), first.getValueOffset(), first.getValueLength())); // Now try overwritting byte [] overwriteValue = Bytes.toBytes("overwrite"); KeyValue overwrite = new KeyValue(bytes, bytes, bytes, overwriteValue); this.csls.add(overwrite); assertEquals(1, this.csls.size()); first = this.csls.first(); - assertTrue(Bytes.equals(overwrite.getValue(), first.getValue())); - assertFalse(Bytes.equals(overwrite.getValue(), kv.getValue())); + assertTrue(Bytes.equals(overwrite.getValueArray(), overwrite.getValueOffset(), + overwrite.getValueLength(), first.getValueArray(), first.getValueOffset(), + first.getValueLength())); + assertFalse(Bytes.equals(CellUtil.cloneValue(overwrite), CellUtil.cloneValue(kv))); } public void testIterator() throws Exception { @@ -71,8 +75,10 @@ public class TestCellSkipListSet extends TestCase { // Assert that we added 'total' values and that they are in order int count = 0; for (Cell kv: this.csls) { - assertEquals("" + count, Bytes.toString(kv.getQualifier())); - assertTrue(Bytes.equals(kv.getValue(), value1)); + assertEquals("" + count, + Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength())); + assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value1, + 0, value1.length)); count++; } assertEquals(total, count); @@ -83,9 +89,11 @@ public class TestCellSkipListSet extends TestCase { // Assert that we added 'total' values and that they are in order and that // we are getting back value2 count = 0; - for (Cell kv: this.csls) { - assertEquals("" + count, Bytes.toString(kv.getQualifier())); - assertTrue(Bytes.equals(kv.getValue(), value2)); + for (Cell kv : this.csls) { + assertEquals("" + count, + Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength())); + assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value2, + 0, value2.length)); count++; } assertEquals(total, count); @@ -103,8 +111,10 @@ public class TestCellSkipListSet extends TestCase { int count = 0; for (Iterator i = this.csls.descendingIterator(); i.hasNext();) { Cell kv = i.next(); - assertEquals("" + (total - (count + 1)), Bytes.toString(kv.getQualifier())); - assertTrue(Bytes.equals(kv.getValue(), value1)); + assertEquals("" + (total - (count + 1)), + Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength())); + assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value1, + 0, value1.length)); count++; } assertEquals(total, count); @@ -117,8 +127,10 @@ public class TestCellSkipListSet extends TestCase { count = 0; for (Iterator i = this.csls.descendingIterator(); i.hasNext();) { Cell kv = i.next(); - assertEquals("" + (total - (count + 1)), Bytes.toString(kv.getQualifier())); - assertTrue(Bytes.equals(kv.getValue(), value2)); + assertEquals("" + (total - (count + 1)), 
+ Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength())); + assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value2, + 0, value2.length)); count++; } assertEquals(total, count); @@ -145,8 +157,10 @@ public class TestCellSkipListSet extends TestCase { this.csls.add(new KeyValue(bytes, bytes, Bytes.toBytes("" + i), value2)); } tail = this.csls.tailSet(splitter); - assertTrue(Bytes.equals(tail.first().getValue(), value2)); + assertTrue(Bytes.equals(tail.first().getValueArray(), tail.first().getValueOffset(), + tail.first().getValueLength(), value2, 0, value2.length)); head = this.csls.headSet(splitter); - assertTrue(Bytes.equals(head.first().getValue(), value2)); + assertTrue(Bytes.equals(head.first().getValueArray(), head.first().getValueOffset(), + head.first().getValueLength(), value2, 0, value2.length)); } } \ No newline at end of file diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java index fce81fc8b76..5b8616990d5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java @@ -207,8 +207,8 @@ public class TestCompoundBloomFilter { // Test for false negatives (not allowed). int numChecked = 0; for (KeyValue kv : kvs) { - byte[] row = kv.getRow(); - boolean present = isInBloom(scanner, row, kv.getQualifier()); + byte[] row = CellUtil.cloneRow(kv); + boolean present = isInBloom(scanner, row, CellUtil.cloneQualifier(kv)); assertTrue(testIdMsg + " Bloom filter false negative on row " + Bytes.toStringBinary(row) + " after " + numChecked + " successful checks", present); @@ -358,9 +358,10 @@ public class TestCompoundBloomFilter { KeyValue rowKV = KeyValueUtil.createKeyValueFromKey(rowKey); KeyValue rowColKV = KeyValueUtil.createKeyValueFromKey(rowColKey); assertEquals(rowKV.getTimestamp(), rowColKV.getTimestamp()); - assertEquals(Bytes.toStringBinary(rowKV.getRow()), - Bytes.toStringBinary(rowColKV.getRow())); - assertEquals(0, rowKV.getQualifier().length); + assertEquals(Bytes.toStringBinary(rowKV.getRowArray(), rowKV.getRowOffset(), + rowKV.getRowLength()), Bytes.toStringBinary(rowColKV.getRowArray(), rowColKV.getRowOffset(), + rowColKV.getRowLength())); + assertEquals(0, rowKV.getQualifierLength()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java index 78575903cfc..4848d6609ec 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java @@ -70,7 +70,7 @@ public class TestDefaultMemStore extends TestCase { private static final int QUALIFIER_COUNT = ROW_COUNT; private static final byte [] FAMILY = Bytes.toBytes("column"); private MultiVersionConsistencyControl mvcc; - private AtomicLong startSeqNum = new AtomicLong(0); + private AtomicLong startSeqNum = new AtomicLong(0); @Override public void setUp() throws Exception { @@ -88,7 +88,9 @@ public class TestDefaultMemStore extends TestCase { this.memstore.add(samekey); Cell found = this.memstore.cellSet.first(); assertEquals(1, this.memstore.cellSet.size()); - assertTrue(Bytes.toString(found.getValue()), 
CellUtil.matchingValue(samekey, found)); + assertTrue( + Bytes.toString(found.getValueArray(), found.getValueOffset(), found.getValueLength()), + CellUtil.matchingValue(samekey, found)); } /** @@ -179,7 +181,7 @@ public class TestDefaultMemStore extends TestCase { /** * A simple test which verifies the 3 possible states when scanning across snapshot. * @throws IOException - * @throws CloneNotSupportedException + * @throws CloneNotSupportedException */ public void testScanAcrossSnapshot2() throws IOException, CloneNotSupportedException { // we are going to the scanning across snapshot with two kvs @@ -843,7 +845,7 @@ public class TestDefaultMemStore extends TestCase { assert(newSize > oldSize); //The kv1 should be removed. assert(memstore.cellSet.size() == 2); - + KeyValue kv4 = KeyValueTestUtil.create("r", "f", "q", 104, "v"); kv4.setSequenceId(1); l.clear(); l.add(kv4); @@ -855,12 +857,12 @@ public class TestDefaultMemStore extends TestCase { } //////////////////////////////////// - // Test for periodic memstore flushes + // Test for periodic memstore flushes // based on time of oldest edit //////////////////////////////////// /** - * Tests that the timeOfOldestEdit is updated correctly for the + * Tests that the timeOfOldestEdit is updated correctly for the * various edit operations in memstore. * @throws Exception */ @@ -876,7 +878,7 @@ public class TestDefaultMemStore extends TestCase { memstore.add(KeyValueTestUtil.create("r", "f", "q", 100, "v")); t = memstore.timeOfOldestEdit(); assertTrue(t == 1234); - // snapshot() will reset timeOfOldestEdit. The method will also assert the + // snapshot() will reset timeOfOldestEdit. The method will also assert the // value is reset to Long.MAX_VALUE t = runSnapshot(memstore); @@ -903,7 +905,7 @@ public class TestDefaultMemStore extends TestCase { * Tests the HRegion.shouldFlush method - adds an edit in the memstore * and checks that shouldFlush returns true, and another where it disables * the periodic flush functionality and tests whether shouldFlush returns - * false. + * false. * @throws Exception */ public void testShouldFlush() throws Exception { @@ -973,7 +975,7 @@ public class TestDefaultMemStore extends TestCase { long t = 1234; @Override public long currentTime() { - return t; + return t; } public void setCurrentTimeMillis(long t) { this.t = t; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 23f5e48e5df..56a9d4b6dea 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -3973,8 +3973,8 @@ public class TestHRegion { if (previousKV != null) { if (Bytes.compareTo(CellUtil.cloneValue(previousKV), thisValue) != 0) { LOG.warn("These two KV should have the same value." 
+ " Previous KV:" + previousKV - + "(memStoreTS:" + previousKV.getMvccVersion() + ")" + ", New KV: " + kv - + "(memStoreTS:" + kv.getMvccVersion() + ")"); + + "(memStoreTS:" + previousKV.getSequenceId() + ")" + ", New KV: " + kv + + "(memStoreTS:" + kv.getSequenceId() + ")"); assertEquals(0, Bytes.compareTo(CellUtil.cloneValue(previousKV), thisValue)); } } @@ -5132,17 +5132,20 @@ public class TestHRegion { List currRow = new ArrayList(); boolean hasNext = scanner.next(currRow); assertEquals(2, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowC, 0, rowC.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowB, 0, rowB.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowA)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowA, 0, rowA.length)); assertFalse(hasNext); scanner.close(); } finally { @@ -5189,17 +5192,20 @@ public class TestHRegion { InternalScanner scanner = region.getScanner(scan); boolean hasNext = scanner.next(currRow); assertEquals(2, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowC, 0, rowC.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowB, 0, rowB.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowA)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowA, 0, rowA.length)); assertFalse(hasNext); scanner.close(); } finally { @@ -5243,17 +5249,20 @@ public class TestHRegion { InternalScanner scanner = region.getScanner(scan); boolean hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowC, 0, rowC.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowB, 0, rowB.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowA)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowA, 0, rowA.length)); assertFalse(hasNext); scanner.close(); } finally { @@ -5311,17 +5320,20 @@ public class TestHRegion { InternalScanner 
scanner = region.getScanner(scan); boolean hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowD, 0, rowD.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowC, 0, rowC.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowB, 0, rowB.length)); assertFalse(hasNext); scanner.close(); @@ -5332,7 +5344,8 @@ public class TestHRegion { scanner = region.getScanner(scan); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowD, 0, rowD.length)); scanner.close(); } finally { HBaseTestingUtility.closeRegionAndWAL(this.region); @@ -5391,17 +5404,20 @@ public class TestHRegion { InternalScanner scanner = region.getScanner(scan); boolean hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowD, 0, rowD.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowC, 0, rowC.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowB, 0, rowB.length)); assertFalse(hasNext); scanner.close(); @@ -5412,7 +5428,8 @@ public class TestHRegion { scanner = region.getScanner(scan); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), rowD, 0, rowD.length)); scanner.close(); } finally { HBaseTestingUtility.closeRegionAndWAL(this.region); @@ -5536,42 +5553,49 @@ public class TestHRegion { // "row4" takes 2 next() calls since batch=3 hasNext = scanner.next(currRow); assertEquals(3, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), row4)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), row4, 0, row4.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(2, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), row4)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow.get(0).getRowLength(), row4, 0, + row4.length)); 
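// ---------------------------------------------------------------------------
// Editor's note, not part of the patch: the TestHRegion hunks in this stretch
// repeat the same nine-argument Bytes.equals call for every scanned row. The
// patch inlines it; a tiny helper like the hypothetical assertRowEquals below
// would express the same zero-copy check once. Assumes JUnit 4 and HBase 1.x.
import static org.junit.Assert.assertTrue;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.util.Bytes;

final class RowAsserts {
  static void assertRowEquals(byte[] expected, Cell cell) {
    assertTrue("row was " + Bytes.toStringBinary(cell.getRowArray(),
            cell.getRowOffset(), cell.getRowLength()),
        Bytes.equals(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
            expected, 0, expected.length));
  }
}
// Usage would be one line per scanned row: assertRowEquals(rowC, currRow.get(0));
// ---------------------------------------------------------------------------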
assertTrue(hasNext); // 2. scan out "row3" (2 kv) currRow.clear(); hasNext = scanner.next(currRow); assertEquals(2, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), row3)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), row3, 0, row3.length)); assertTrue(hasNext); // 3. scan out "row2" (4 kvs) // "row2" takes 2 next() calls since batch=3 currRow.clear(); hasNext = scanner.next(currRow); assertEquals(3, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), row2)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), row2, 0, row2.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), row2)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), row2, 0, row2.length)); assertTrue(hasNext); // 4. scan out "row1" (2 kv) currRow.clear(); hasNext = scanner.next(currRow); assertEquals(2, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), row1)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), row1, 0, row1.length)); assertTrue(hasNext); // 5. scan out "row0" (1 kv) currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), row0)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), row0, 0, row0.length)); assertFalse(hasNext); scanner.close(); @@ -5632,22 +5656,26 @@ public class TestHRegion { List currRow = new ArrayList(); boolean hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), row4)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), row4, 0, row4.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), row3)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), row3, 0, row3.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), row2)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), row2, 0, row2.length)); assertTrue(hasNext); currRow.clear(); hasNext = scanner.next(currRow); assertEquals(1, currRow.size()); - assertTrue(Bytes.equals(currRow.get(0).getRow(), row1)); + assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow + .get(0).getRowLength(), row1, 0, row1.length)); assertFalse(hasNext); } finally { HBaseTestingUtility.closeRegionAndWAL(this.region); @@ -5699,7 +5727,8 @@ public class TestHRegion { int verify = startRow + 2 * numRows - 1; do { more = scanner.next(currRow); - assertEquals(Bytes.toString(currRow.get(0).getRow()), verify + ""); + assertEquals(Bytes.toString(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), + currRow.get(0).getRowLength()), verify + ""); verify--; currRow.clear(); } while(more); @@ -5712,7 +5741,8 @@ public class TestHRegion { verify = startRow 
+ 2 * numRows - 1; do { more = scanner.next(currRow); - assertEquals(Bytes.toString(currRow.get(0).getRow()), verify + ""); + assertEquals(Bytes.toString(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), + currRow.get(0).getRowLength()), verify + ""); verify--; currRow.clear(); } while(more); @@ -5725,7 +5755,8 @@ public class TestHRegion { verify = startRow + numRows - 1; do { more = scanner.next(currRow); - assertEquals(Bytes.toString(currRow.get(0).getRow()), verify + ""); + assertEquals(Bytes.toString(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), + currRow.get(0).getRowLength()), verify + ""); verify--; currRow.clear(); } while(more); @@ -5738,7 +5769,8 @@ public class TestHRegion { verify = startRow + numRows - 1; do { more = scanner.next(currRow); - assertEquals(Bytes.toString(currRow.get(0).getRow()), verify + ""); + assertEquals(Bytes.toString(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), + currRow.get(0).getRowLength()), verify + ""); verify--; currRow.clear(); } while(more); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java index 70ae657bdbf..c065ee78beb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java @@ -279,7 +279,7 @@ public class TestHRegionReplayEvents { if (WALEdit.isMetaEditFamily(entry.getEdit().getCells().get(0))) { return 0; // handled elsewhere } - Put put = new Put(entry.getEdit().getCells().get(0).getRow()); + Put put = new Put(CellUtil.cloneRow(entry.getEdit().getCells().get(0))); for (Cell cell : entry.getEdit().getCells()) put.add(cell); put.setDurability(Durability.SKIP_WAL); MutationReplay mutation = new MutationReplay(MutationType.PUT, put, 0, 0); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java index c463a238968..aaf20da8ce9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMajorCompaction.java @@ -76,7 +76,7 @@ public class TestMajorCompaction { private static final Log LOG = LogFactory.getLog(TestMajorCompaction.class.getName()); private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU(); protected Configuration conf = UTIL.getConfiguration(); - + private Region r = null; private HTableDescriptor htd = null; private static final byte [] COLUMN_FAMILY = fam1; @@ -355,7 +355,7 @@ public class TestMajorCompaction { HFileScanner scanner = f.getReader().getScanner(false, false); scanner.seekTo(); do { - byte [] row = scanner.getCell().getRow(); + byte [] row = CellUtil.cloneRow(scanner.getCell()); if (Bytes.equals(row, STARTROW)) { count1++; } else if(Bytes.equals(row, secondRowBytes)) { @@ -434,7 +434,7 @@ public class TestMajorCompaction { assertNotNull("Expected to receive a compaction request", request); assertEquals( "User-requested major compaction should always occur, even if there are too many store files", - true, + true, request.isMajor()); } @@ -457,7 +457,7 @@ public class TestMajorCompaction { List results = new ArrayList(); boolean result = s.next(results); assertTrue(!results.isEmpty()); - r.delete(new 
Delete(results.get(0).getRow())); + r.delete(new Delete(CellUtil.cloneRow(results.get(0)))); if (!result) break; } while (true); s.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java index ef7f1056e7c..33ae07bfda2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerWithBulkload.java @@ -21,38 +21,38 @@ import java.io.IOException; import java.util.List; import java.util.concurrent.CountDownLatch; -import junit.framework.Assert; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.RegionLocator; -import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.HBaseAdmin; -import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.RegionLocator; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles; +import org.apache.hadoop.hbase.testclassification.MediumTests; +import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; +import junit.framework.Assert; + @Category({RegionServerTests.class, MediumTests.class}) public class TestScannerWithBulkload { private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -100,11 +100,16 @@ public class TestScannerWithBulkload { while (result != null) { List cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q")); for (Cell _c : cells) { - if (Bytes.toString(_c.getRow()).equals("row1")) { - System.out.println(Bytes.toString(_c.getRow())); - System.out.println(Bytes.toString(_c.getQualifier())); - System.out.println(Bytes.toString(_c.getValue())); - Assert.assertEquals("version3", Bytes.toString(_c.getValue())); + if (Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength()) + .equals("row1")) { + System.out + .println(Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength())); + System.out.println(Bytes.toString(_c.getQualifierArray(), _c.getQualifierOffset(), + _c.getQualifierLength())); + System.out.println( + Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength())); + Assert.assertEquals("version3", + Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength())); } 
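// ---------------------------------------------------------------------------
// Editor's illustration, not part of the patch: the TestScannerWithBulkload
// hunks above print row, qualifier and value by decoding each (array, offset,
// length) range in place instead of calling the removed copying getters. A
// hypothetical debug helper doing the same, assuming an HBase 1.x classpath.
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.util.Bytes;

final class CellDebug {
  /** Human-readable row/qualifier=value, decoded without cloning the cell. */
  static String describe(Cell c) {
    return Bytes.toString(c.getRowArray(), c.getRowOffset(), c.getRowLength())
        + "/" + Bytes.toString(c.getQualifierArray(), c.getQualifierOffset(),
            c.getQualifierLength())
        + "=" + Bytes.toString(c.getValueArray(), c.getValueOffset(),
            c.getValueLength());
  }
}
// ---------------------------------------------------------------------------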
} result = scanner.next(); @@ -118,11 +123,16 @@ public class TestScannerWithBulkload { while (result != null) { List cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q")); for (Cell _c : cells) { - if (Bytes.toString(_c.getRow()).equals("row1")) { - System.out.println(Bytes.toString(_c.getRow())); - System.out.println(Bytes.toString(_c.getQualifier())); - System.out.println(Bytes.toString(_c.getValue())); - Assert.assertEquals(expctedVal, Bytes.toString(_c.getValue())); + if (Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength()) + .equals("row1")) { + System.out + .println(Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength())); + System.out.println(Bytes.toString(_c.getQualifierArray(), _c.getQualifierOffset(), + _c.getQualifierLength())); + System.out.println( + Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength())); + Assert.assertEquals(expctedVal, + Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength())); } } result = scanner.next(); @@ -191,7 +201,9 @@ public class TestScannerWithBulkload { Result result = scanner.next(); List cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q")); Assert.assertEquals(1, cells.size()); - Assert.assertEquals("version1", Bytes.toString(cells.get(0).getValue())); + Cell _c = cells.get(0); + Assert.assertEquals("version1", + Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength())); scanner.close(); return table; } @@ -270,11 +282,16 @@ public class TestScannerWithBulkload { while (result != null) { List cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q")); for (Cell _c : cells) { - if (Bytes.toString(_c.getRow()).equals("row1")) { - System.out.println(Bytes.toString(_c.getRow())); - System.out.println(Bytes.toString(_c.getQualifier())); - System.out.println(Bytes.toString(_c.getValue())); - Assert.assertEquals("version3", Bytes.toString(_c.getValue())); + if (Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength()) + .equals("row1")) { + System.out + .println(Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength())); + System.out.println(Bytes.toString(_c.getQualifierArray(), _c.getQualifierOffset(), + _c.getQualifierLength())); + System.out.println( + Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength())); + Assert.assertEquals("version3", + Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength())); } } result = scanner.next(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java index 36d3b2d2a60..b763a22b0c9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java @@ -34,14 +34,13 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.hadoop.hbase.testclassification.RegionServerTests; -import org.apache.hadoop.hbase.testclassification.SmallTests; 
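// ---------------------------------------------------------------------------
// Editor's note, not part of the patch: not every call site can switch to
// range access. Where an API stores a detached byte[] -- the Delete built in
// the TestMajorCompaction hunk above, or the stripe boundaries collected in
// the TestStripeCompactor hunk further on -- the patch clones via CellUtil
// instead. A minimal sketch, assuming an HBase 1.x classpath.
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Delete;

final class CloneWhenBytesEscape {
  static Delete deleteForRowOf(Cell cell) {
    // Delete keeps the row array it is handed, so a copy is required anyway;
    // CellUtil.cloneRow makes that copy explicit.
    return new Delete(CellUtil.cloneRow(cell));
  }
}
// ---------------------------------------------------------------------------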
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.HFileLink; @@ -54,6 +53,8 @@ import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder; import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder; import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl; import org.apache.hadoop.hbase.io.hfile.HFileScanner; +import org.apache.hadoop.hbase.testclassification.RegionServerTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.BloomFilterFactory; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ChecksumType; @@ -171,9 +172,9 @@ public class TestStoreFile extends HBaseTestCase { // may be in middle of row. Create new one with empty column and // timestamp. Cell kv = reader.midkey(); - byte [] midRow = kv.getRow(); + byte [] midRow = CellUtil.cloneRow(kv); kv = reader.getLastKey(); - byte [] finalRow = kv.getRow(); + byte [] finalRow = CellUtil.cloneRow(kv); // Make a reference HRegionInfo splitHri = new HRegionInfo(hri.getTable(), null, midRow); Path refPath = splitStoreFile(regionFs, splitHri, TEST_FAMILY, hsf, midRow, true); @@ -186,11 +187,13 @@ public class TestStoreFile extends HBaseTestCase { ByteBuffer bb = ByteBuffer.wrap(((KeyValue) s.getKey()).getKey()); kv = KeyValueUtil.createKeyValueFromKey(bb); if (first) { - assertTrue(Bytes.equals(kv.getRow(), midRow)); + assertTrue(Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), midRow, 0, + midRow.length)); first = false; } } - assertTrue(Bytes.equals(kv.getRow(), finalRow)); + assertTrue(Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), finalRow, 0, + finalRow.length)); } @Test @@ -301,7 +304,7 @@ public class TestStoreFile extends HBaseTestCase { // Now confirm that I can read from the ref to link HFileScanner sB = hsfB.createReader().getScanner(false, false); sB.seekTo(); - + //count++ as seekTo() will advance the scanner count++; while (sB.next()) { @@ -316,7 +319,7 @@ public class TestStoreFile extends HBaseTestCase { throws IOException { Cell midkey = f.createReader().midkey(); KeyValue midKV = (KeyValue)midkey; - byte [] midRow = midKV.getRow(); + byte [] midRow = CellUtil.cloneRow(midKV); // Create top split. HRegionInfo topHri = new HRegionInfo(regionFs.getRegionInfo().getTable(), null, midRow); @@ -384,9 +387,9 @@ public class TestStoreFile extends HBaseTestCase { assertTrue(fs.exists(f.getPath())); topPath = splitStoreFile(regionFs, topHri, TEST_FAMILY, f, badmidkey, true); bottomPath = splitStoreFile(regionFs, bottomHri, TEST_FAMILY, f, badmidkey, false); - + assertNull(bottomPath); - + top = new StoreFile(this.fs, topPath, conf, cacheConf, BloomType.NONE).createReader(); // Now read from the top. 
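// ---------------------------------------------------------------------------
// Editor's illustration, not part of the patch: the TestStoreFile hunk just
// below compares two cells' serialized keys via ensureKeyValue and
// getBuffer()/getKeyOffset()/getKeyLength(). A sketch of that idiom, assuming
// HBase 1.x where these (since-deprecated) KeyValue accessors still exist.
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;

final class KeyEquality {
  static boolean sameKey(Cell a, Cell b) {
    KeyValue ka = KeyValueUtil.ensureKeyValue(a);
    KeyValue kb = KeyValueUtil.ensureKeyValue(b);
    return Bytes.compareTo(ka.getBuffer(), ka.getKeyOffset(), ka.getKeyLength(),
        kb.getBuffer(), kb.getKeyOffset(), kb.getKeyLength()) == 0;
  }
}
// ---------------------------------------------------------------------------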
first = true; @@ -402,7 +405,8 @@ public class TestStoreFile extends HBaseTestCase { first = false; KeyValue keyKV = KeyValueUtil.createKeyValueFromKey(key); LOG.info("First top when key < bottom: " + keyKV); - String tmp = Bytes.toString(keyKV.getRow()); + String tmp = + Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength()); for (int i = 0; i < tmp.length(); i++) { assertTrue(tmp.charAt(i) == 'a'); } @@ -410,7 +414,7 @@ public class TestStoreFile extends HBaseTestCase { } KeyValue keyKV = KeyValueUtil.createKeyValueFromKey(key); LOG.info("Last top when key < bottom: " + keyKV); - String tmp = Bytes.toString(keyKV.getRow()); + String tmp = Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength()); for (int i = 0; i < tmp.length(); i++) { assertTrue(tmp.charAt(i) == 'z'); } @@ -434,7 +438,7 @@ public class TestStoreFile extends HBaseTestCase { first = false; keyKV = KeyValueUtil.createKeyValueFromKey(key); LOG.info("First bottom when key > top: " + keyKV); - tmp = Bytes.toString(keyKV.getRow()); + tmp = Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength()); for (int i = 0; i < tmp.length(); i++) { assertTrue(tmp.charAt(i) == 'a'); } @@ -443,7 +447,8 @@ public class TestStoreFile extends HBaseTestCase { keyKV = KeyValueUtil.createKeyValueFromKey(key); LOG.info("Last bottom when key > top: " + keyKV); for (int i = 0; i < tmp.length(); i++) { - assertTrue(Bytes.toString(keyKV.getRow()).charAt(i) == 'z'); + assertTrue(Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength()) + .charAt(i) == 'z'); } } finally { if (top != null) { @@ -500,7 +505,7 @@ public class TestStoreFile extends HBaseTestCase { + ", expected no more than " + maxFalsePos + ")", falsePos <= maxFalsePos); } - + private static final int BLOCKSIZE_SMALL = 8192; @Test @@ -909,7 +914,7 @@ public class TestStoreFile extends HBaseTestCase { KeyValue keyv1 = KeyValueUtil.ensureKeyValue(kv1); KeyValue keyv2 = KeyValueUtil.ensureKeyValue(kv2); assertTrue(Bytes.compareTo( - keyv1.getBuffer(), keyv1.getKeyOffset(), keyv1.getKeyLength(), + keyv1.getBuffer(), keyv1.getKeyOffset(), keyv1.getKeyLength(), keyv2.getBuffer(), keyv2.getKeyOffset(), keyv2.getKeyLength()) == 0); assertTrue(Bytes.compareTo( kv1.getValueArray(), kv1.getValueOffset(), kv1.getValueLength(), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeCompactor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeCompactor.java index 3c247a9c43c..eef229f6309 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeCompactor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeCompactor.java @@ -43,6 +43,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; +import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; @@ -175,7 +176,7 @@ public class TestStripeCompactor { List boundaries = new ArrayList(); boundaries.add(left); for (int i = 1; i < output.length; ++i) { - boundaries.add(output[i][0].getRow()); + boundaries.add(CellUtil.cloneRow(output[i][0])); } boundaries.add(right); writers.verifyBoundaries(boundaries.toArray(new byte[][] {})); diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java index 9c99a436236..32e585599f7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTags.java @@ -34,14 +34,11 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; -import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Durability; -import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; @@ -56,6 +53,8 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; +import org.apache.hadoop.hbase.testclassification.MediumTests; +import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; import org.junit.After; import org.junit.AfterClass; @@ -568,15 +567,16 @@ public class TestTags { for (Cell cell : edits) { KeyValue kv = KeyValueUtil.ensureKeyValue(cell); if (cf == null) { - cf = kv.getFamily(); + cf = CellUtil.cloneFamily(kv); } Tag tag = new Tag((byte) 1, attribute); List tagList = new ArrayList(); tagList.add(tag); - KeyValue newKV = new KeyValue(kv.getRow(), 0, kv.getRowLength(), kv.getFamily(), 0, - kv.getFamilyLength(), kv.getQualifier(), 0, kv.getQualifierLength(), - kv.getTimestamp(), KeyValue.Type.codeToType(kv.getType()), kv.getValue(), 0, + KeyValue newKV = new KeyValue(CellUtil.cloneRow(kv), 0, kv.getRowLength(), + CellUtil.cloneFamily(kv), 0, kv.getFamilyLength(), CellUtil.cloneQualifier(kv), 0, + kv.getQualifierLength(), kv.getTimestamp(), + KeyValue.Type.codeToType(kv.getTypeByte()), CellUtil.cloneValue(kv), 0, kv.getValueLength(), tagList); ((List) updatedCells).add(newKV); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java index 2c75f234beb..651f7b25b1b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java @@ -510,7 +510,8 @@ public class TestLogRolling { while ((entry = reader.next()) != null) { LOG.debug("#"+entry.getKey().getLogSeqNum()+": "+entry.getEdit().getCells()); for (Cell cell : entry.getEdit().getCells()) { - loggedRows.add(Bytes.toStringBinary(cell.getRow())); + loggedRows.add(Bytes.toStringBinary(cell.getRowArray(), cell.getRowOffset(), + cell.getRowLength())); } } } catch (EOFException e) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestProtobufLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestProtobufLog.java index 
c1df2259b31..e6237f8431f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestProtobufLog.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestProtobufLog.java
@@ -26,12 +26,11 @@ import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -62,8 +61,6 @@ import org.junit.rules.TestName;
  */
 @Category({RegionServerTests.class, MediumTests.class})
 public class TestProtobufLog {
-  private static final Log LOG = LogFactory.getLog(TestProtobufLog.class);
-
   protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
 
   protected FileSystem fs;
@@ -189,9 +186,10 @@ public class TestProtobufLog {
         assertEquals(tableName, entry.getKey().getTablename());
         int idx = 0;
         for (Cell val : entry.getEdit().getCells()) {
-          assertTrue(Bytes.equals(row, val.getRow()));
+          assertTrue(Bytes.equals(row, 0, row.length, val.getRowArray(), val.getRowOffset(),
+            val.getRowLength()));
           String value = i + "" + idx;
-          assertArrayEquals(Bytes.toBytes(value), val.getValue());
+          assertArrayEquals(Bytes.toBytes(value), CellUtil.cloneValue(val));
           idx++;
         }
       }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
index f1e956ceffa..7f5b59cb3c1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWithTags.java
@@ -29,6 +29,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -43,8 +44,6 @@ import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Table;
@@ -208,15 +207,16 @@ public class TestReplicationWithTags {
       for (Cell cell : edits) {
         KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
         if (cf == null) {
-          cf = kv.getFamily();
+          cf = CellUtil.cloneFamily(kv);
         }
         Tag tag = new Tag(TAG_TYPE, attribute);
         List<Tag> tagList = new ArrayList<Tag>();
         tagList.add(tag);
-        KeyValue newKV = new KeyValue(kv.getRow(), 0, kv.getRowLength(), kv.getFamily(), 0,
-            kv.getFamilyLength(), kv.getQualifier(), 0, kv.getQualifierLength(),
-            kv.getTimestamp(), KeyValue.Type.codeToType(kv.getType()), kv.getValue(), 0,
+        KeyValue newKV = new KeyValue(CellUtil.cloneRow(kv), 0, kv.getRowLength(),
+            CellUtil.cloneFamily(kv), 0, kv.getFamilyLength(), CellUtil.cloneQualifier(kv), 0,
+            kv.getQualifierLength(), kv.getTimestamp(),
+            KeyValue.Type.codeToType(kv.getTypeByte()), CellUtil.cloneValue(kv), 0,
             kv.getValueLength(), tagList);
         ((List<Cell>) updatedCells).add(newKV);
       }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java
index fe210707eb8..08479bec7c3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.replication.regionserver;
 
 import static org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster.closeRegion;
 import static org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster.openRegion;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
@@ -28,9 +28,9 @@ import java.util.Queue;
 import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.concurrent.atomic.AtomicLong;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -39,7 +39,6 @@ import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory;
 import org.apache.hadoop.hbase.client.Table;
@@ -49,12 +48,9 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.WALCoprocessorEnvironment;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse;
-import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster;
-import org.apache.hadoop.hbase.wal.WAL.Entry;
-import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
 import org.apache.hadoop.hbase.replication.ReplicationEndpoint.ReplicateContext;
@@ -65,6 +61,8 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.ReplicationTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
+import org.apache.hadoop.hbase.wal.WAL.Entry;
+import org.apache.hadoop.hbase.wal.WALKey;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -82,9 +80,6 @@ import com.google.common.collect.Lists;
 
 @Category({ReplicationTests.class, MediumTests.class})
 public class TestRegionReplicaReplicationEndpointNoMaster {
-  private static final Log LOG = LogFactory.getLog(
-      TestRegionReplicaReplicationEndpointNoMaster.class);
-
   private static final int NB_SERVERS = 2;
   private static TableName tableName = TableName.valueOf(
     TestRegionReplicaReplicationEndpointNoMaster.class.getSimpleName());
@@ -193,7 +188,7 @@ public class TestRegionReplicaReplicationEndpointNoMaster {
       throws IOException, RuntimeException {
     Entry entry;
     while ((entry = entries.poll()) != null) {
-      byte[] row = entry.getEdit().getCells().get(0).getRow();
+      byte[] row = CellUtil.cloneRow(entry.getEdit().getCells().get(0));
       RegionLocations locations = connection.locateRegion(tableName, row, true, true);
       RegionReplicaReplayCallable callable = new RegionReplicaReplayCallable(connection,
         RpcControllerFactory.instantiate(connection.getConfiguration()),
@@ -298,7 +293,9 @@
 
     Assert.assertEquals(1000, entries.size());
     for (Entry e: entries) {
-      if (Integer.parseInt(Bytes.toString(e.getEdit().getCells().get(0).getValue())) % 2 == 0) {
+      Cell _c = e.getEdit().getCells().get(0);
+      if (Integer.parseInt(
+        Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength())) % 2 == 0) {
         e.getKey().setOrigLogSeqNum(1); // simulate dist log replay by setting orig seq id
       }
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
index 3487d15f8a7..39b7f1b5448 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestDefaultScanLabelGeneratorStack.java
@@ -137,20 +137,26 @@
         Cell current = cellScanner.current();
         assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
           current.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current.getQualifier(), Q1));
-        assertTrue(Bytes.equals(current.getValue(), value1));
+        assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+          current.getQualifierLength(), Q1, 0, Q1.length));
+        assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+          current.getValueLength(), value1, 0, value1.length));
         cellScanner.advance();
         current = cellScanner.current();
         assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
-            current.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current.getQualifier(), Q2));
-        assertTrue(Bytes.equals(current.getValue(), value2));
+          current.getRowLength(), ROW_1, 0, ROW_1.length));
+        assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+          current.getQualifierLength(), Q2, 0, Q2.length));
+        assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+          current.getValueLength(), value2, 0, value2.length));
         cellScanner.advance();
         current = cellScanner.current();
         assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
-            current.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current.getQualifier(), Q3));
-        assertTrue(Bytes.equals(current.getValue(), value3));
+          current.getRowLength(), ROW_1, 0, ROW_1.length));
+        assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+          current.getQualifierLength(), Q3, 0, Q3.length));
+        assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+          current.getValueLength(), value3, 0, value3.length));
 
         return null;
       }
@@ -173,15 +179,19 @@
         // test user can see value2 (CONFIDENTIAL) and value3 (no label)
         assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
           current.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current.getQualifier(), Q2));
-        assertTrue(Bytes.equals(current.getValue(), value2));
+        assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+          current.getQualifierLength(), Q2, 0, Q2.length));
+        assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+          current.getValueLength(), value2, 0, value2.length));
         cellScanner.advance();
         current = cellScanner.current();
         // test user can see value2 (CONFIDENTIAL) and value3 (no label)
         assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
           current.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current.getQualifier(), Q3));
-        assertTrue(Bytes.equals(current.getValue(), value3));
+        assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+          current.getQualifierLength(), Q3, 0, Q3.length));
+        assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+          current.getValueLength(), value3, 0, value3.length));
 
         // Test scan with correct auth attribute for test user
         Scan s1 = new Scan();
@@ -198,15 +208,19 @@
         // test user can see value2 (CONFIDENTIAL) and value3 (no label)
         assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
           current1.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current1.getQualifier(), Q2));
-        assertTrue(Bytes.equals(current1.getValue(), value2));
+        assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
+          current1.getQualifierLength(), Q2, 0, Q2.length));
+        assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
+          current1.getValueLength(), value2, 0, value2.length));
         cellScanner1.advance();
         current1 = cellScanner1.current();
         // test user can see value2 (CONFIDENTIAL) and value3 (no label)
         assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
           current1.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current1.getQualifier(), Q3));
-        assertTrue(Bytes.equals(current1.getValue(), value3));
+        assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
+          current1.getQualifierLength(), Q3, 0, Q3.length));
+        assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
+          current1.getValueLength(), value3, 0, value3.length));
 
         // Test scan with incorrect auth attribute for test user
         Scan s2 = new Scan();
@@ -221,8 +235,10 @@
         // This scan will only see value3 (no label)
         assertTrue(Bytes.equals(current2.getRowArray(), current2.getRowOffset(),
           current2.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current2.getQualifier(), Q3));
-        assertTrue(Bytes.equals(current2.getValue(), value3));
+        assertTrue(Bytes.equals(current2.getQualifierArray(), current2.getQualifierOffset(),
+          current2.getQualifierLength(), Q3, 0, Q3.length));
+        assertTrue(Bytes.equals(current2.getValueArray(), current2.getValueOffset(),
+          current2.getValueLength(), value3, 0, value3.length));
 
         assertFalse(cellScanner2.advance());
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
index a945a9e060d..7fa240e6c20 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsReplication.java
@@ -50,7 +50,6 @@ import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -406,7 +405,7 @@
       for (Cell cell : edits) {
         KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
         if (cf == null) {
-          cf = kv.getFamily();
+          cf = CellUtil.cloneFamily(kv);
         }
         Tag tag = new Tag((byte) NON_VIS_TAG_TYPE, attribute);
         List<Tag> tagList = new ArrayList<Tag>();
@@ -414,10 +413,6 @@
         tagList.addAll(kv.getTags());
         byte[] fromList = Tag.fromList(tagList);
         TagRewriteCell newcell = new TagRewriteCell(kv, fromList);
-        KeyValue newKV = new KeyValue(kv.getRow(), 0, kv.getRowLength(), kv.getFamily(), 0,
-            kv.getFamilyLength(), kv.getQualifier(), 0, kv.getQualifierLength(),
-            kv.getTimestamp(), KeyValue.Type.codeToType(kv.getType()), kv.getValue(), 0,
-            kv.getValueLength(), tagList);
         ((List<Cell>) updatedCells).add(newcell);
       }
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
index 76ea96cee07..6c1a47b78f5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
@@ -151,20 +151,26 @@
           Cell current = cellScanner.current();
           assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
             current.getRowLength(), ROW_1, 0, ROW_1.length));
-          assertTrue(Bytes.equals(current.getQualifier(), Q1));
-          assertTrue(Bytes.equals(current.getValue(), value1));
+          assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+            current.getQualifierLength(), Q1, 0, Q1.length));
+          assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+            current.getValueLength(), value1, 0, value1.length));
           cellScanner.advance();
           current = cellScanner.current();
           assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
             current.getRowLength(), ROW_1, 0, ROW_1.length));
-          assertTrue(Bytes.equals(current.getQualifier(), Q2));
-          assertTrue(Bytes.equals(current.getValue(), value2));
+          assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+            current.getQualifierLength(), Q2, 0, Q2.length));
+          assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+            current.getValueLength(), value2, 0, value2.length));
           cellScanner.advance();
           current = cellScanner.current();
           assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
             current.getRowLength(), ROW_1, 0, ROW_1.length));
-          assertTrue(Bytes.equals(current.getQualifier(), Q3));
-          assertTrue(Bytes.equals(current.getValue(), value3));
+          assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+            current.getQualifierLength(), Q3, 0, Q3.length));
+          assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+            current.getValueLength(), value3, 0, value3.length));
         }
         return null;
       }
@@ -206,15 +212,19 @@
         // test user can see value2 (CONFIDENTIAL) and value3 (no label)
         assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
           current.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current.getQualifier(), Q2));
-        assertTrue(Bytes.equals(current.getValue(), value2));
+        assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+          current.getQualifierLength(), Q2, 0, Q2.length));
+        assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+          current.getValueLength(), value2, 0, value2.length));
         cellScanner.advance();
         current = cellScanner.current();
         // test user can see value2 (CONFIDENTIAL) and value3 (no label)
         assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
           current.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current.getQualifier(), Q3));
-        assertTrue(Bytes.equals(current.getValue(), value3));
+        assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+          current.getQualifierLength(), Q3, 0, Q3.length));
+        assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+          current.getValueLength(), value3, 0, value3.length));
 
         // Test scan with correct auth attribute for test user
         Scan s1 = new Scan();
@@ -231,15 +241,19 @@
         // test user can see value2 (CONFIDENTIAL) and value3 (no label)
         assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
           current1.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current1.getQualifier(), Q2));
-        assertTrue(Bytes.equals(current1.getValue(), value2));
+        assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
+          current1.getQualifierLength(), Q2, 0, Q2.length));
+        assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
+          current1.getValueLength(), value2, 0, value2.length));
         cellScanner1.advance();
         current1 = cellScanner1.current();
         // test user can see value2 (CONFIDENTIAL) and value3 (no label)
         assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
           current1.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current1.getQualifier(), Q3));
-        assertTrue(Bytes.equals(current1.getValue(), value3));
+        assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
+          current1.getQualifierLength(), Q3, 0, Q3.length));
+        assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
+          current1.getValueLength(), value3, 0, value3.length));
 
         // Test scan with incorrect auth attribute for test user
         Scan s2 = new Scan();
@@ -254,8 +268,10 @@
         // This scan will only see value3 (no label)
         assertTrue(Bytes.equals(current2.getRowArray(), current2.getRowOffset(),
           current2.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current2.getQualifier(), Q3));
-        assertTrue(Bytes.equals(current2.getValue(), value3));
+        assertTrue(Bytes.equals(current2.getQualifierArray(), current2.getQualifierOffset(),
+          current2.getQualifierLength(), Q3, 0, Q3.length));
+        assertTrue(Bytes.equals(current2.getValueArray(), current2.getValueOffset(),
+          current2.getValueLength(), value3, 0, value3.length));
 
         assertFalse(cellScanner2.advance());
       }
@@ -315,8 +331,10 @@
         // test user can only see value3 (no label)
         assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
           current1.getRowLength(), ROW_1, 0, ROW_1.length));
-        assertTrue(Bytes.equals(current1.getQualifier(), Q3));
-        assertTrue(Bytes.equals(current1.getValue(), value3));
+        assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
+          current1.getQualifierLength(), Q3, 0, Q3.length));
+        assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
+          current1.getValueLength(), value3, 0, value3.length));
 
         assertFalse(cellScanner1.advance());
       }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
index 866382dda2d..451ccee2f2f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
@@ -29,24 +29,22 @@ import java.util.List;
 import java.util.Map;
 import java.util.NavigableSet;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.IsolationLevel;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@@ -55,9 +53,9 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
+import org.apache.hadoop.hbase.regionserver.ScanInfo;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.Store;
-import org.apache.hadoop.hbase.regionserver.ScanInfo;
 import org.apache.hadoop.hbase.regionserver.StoreScanner;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -66,7 +64,6 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
@@ -74,7 +71,6 @@
 @Category({MiscTests.class, MediumTests.class})
 @RunWith(Parameterized.class)
 public class TestCoprocessorScanPolicy {
-  private static final Log LOG = LogFactory.getLog(TestCoprocessorScanPolicy.class);
   protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private static final byte[] F = Bytes.toBytes("fam");
   private static final byte[] Q = Bytes.toBytes("qual");
@@ -229,12 +225,16 @@
       if (put.getAttribute("ttl") != null) {
         Cell cell = put.getFamilyCellMap().values().iterator().next().get(0);
         KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-        ttls.put(TableName.valueOf(kv.getQualifier()), Bytes.toLong(kv.getValue()));
+        ttls.put(TableName.valueOf(
+          Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength())),
+          Bytes.toLong(CellUtil.cloneValue(kv)));
         c.bypass();
       } else if (put.getAttribute("versions") != null) {
         Cell cell = put.getFamilyCellMap().values().iterator().next().get(0);
         KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-        versions.put(TableName.valueOf(kv.getQualifier()), Bytes.toInt(kv.getValue()));
+        versions.put(TableName.valueOf(
+          Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength())),
+          Bytes.toInt(CellUtil.cloneValue(kv)));
         c.bypass();
       }
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
index 6ae8ee1532c..847b206d980 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
@@ -39,19 +39,16 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
+import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.log4j.Level;
-
-// imports for things that haven't moved from regionserver.wal yet.
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
-import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
-
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -123,12 +120,12 @@ public class TestSecureWAL {
       List<Cell> cells = entry.getEdit().getCells();
       assertTrue("Should be one KV per WALEdit", cells.size() == 1);
       for (Cell cell: cells) {
-        byte[] thisRow = cell.getRow();
-        assertTrue("Incorrect row", Bytes.equals(thisRow, row));
-        byte[] thisFamily = cell.getFamily();
-        assertTrue("Incorrect family", Bytes.equals(thisFamily, family));
-        byte[] thisValue = cell.getValue();
-        assertTrue("Incorrect value", Bytes.equals(thisValue, value));
+        assertTrue("Incorrect row", Bytes.equals(cell.getRowArray(), cell.getRowOffset(),
+          cell.getRowLength(), row, 0, row.length));
+        assertTrue("Incorrect family", Bytes.equals(cell.getFamilyArray(), cell.getFamilyOffset(),
+          cell.getFamilyLength(), family, 0, family.length));
+        assertTrue("Incorrect value", Bytes.equals(cell.getValueArray(), cell.getValueOffset(),
+          cell.getValueLength(), value, 0, value.length));
       }
     }
     assertEquals("Should have read back as many KVs as written", total, count);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
index c84794571c2..e393c1365e6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -51,6 +52,12 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.SampleRegionWALObserver;
+import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
+import org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogReader;
+import org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogWriter;
+import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
+import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -68,14 +75,6 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
 
-// imports for things that haven't moved from regionserver.wal yet.
-import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
-import org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogReader;
-import org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogWriter;
-import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
-import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-
 /**
  * WAL tests that can be reused across providers.
  */
@@ -521,8 +520,9 @@
       assertTrue(Bytes.equals(info.getEncodedNameAsBytes(), key.getEncodedRegionName()));
       assertTrue(htd.getTableName().equals(key.getTablename()));
       Cell cell = val.getCells().get(0);
-      assertTrue(Bytes.equals(row, cell.getRow()));
-      assertEquals((byte)(i + '0'), cell.getValue()[0]);
+      assertTrue(Bytes.equals(row, 0, row.length, cell.getRowArray(), cell.getRowOffset(),
+        cell.getRowLength()));
+      assertEquals((byte)(i + '0'), CellUtil.cloneValue(cell)[0]);
       System.out.println(key + " " + val);
     }
   } finally {
@@ -574,8 +574,9 @@
         assertTrue(Bytes.equals(hri.getEncodedNameAsBytes(),
           entry.getKey().getEncodedRegionName()));
         assertTrue(htd.getTableName().equals(entry.getKey().getTablename()));
-        assertTrue(Bytes.equals(row, val.getRow()));
-        assertEquals((byte)(idx + '0'), val.getValue()[0]);
+        assertTrue(Bytes.equals(row, 0, row.length, val.getRowArray(), val.getRowOffset(),
+          val.getRowLength()));
+        assertEquals((byte) (idx + '0'), CellUtil.cloneValue(val)[0]);
         System.out.println(entry.getKey() + " " + val);
         idx++;
       }
@@ -687,9 +688,10 @@
       assertEquals(tableName, entry.getKey().getTablename());
       int idx = 0;
       for (Cell val : entry.getEdit().getCells()) {
-        assertTrue(Bytes.equals(row, val.getRow()));
+        assertTrue(Bytes.equals(row, 0, row.length, val.getRowArray(), val.getRowOffset(),
+          val.getRowLength()));
         String value = i + "" + idx;
-        assertArrayEquals(Bytes.toBytes(value), val.getValue());
+        assertArrayEquals(Bytes.toBytes(value), CellUtil.cloneValue(val));
         idx++;
       }
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
index e263cdb5578..0fd44f5dc3d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
@@ -41,13 +41,6 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.hadoop.hbase.testclassification.RegionServerTests;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.log4j.Level;
-import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
-import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -62,18 +55,24 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
 import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.wal.WAL.Entry;
-import org.apache.hadoop.hbase.wal.WAL.Reader;
-import org.apache.hadoop.hbase.wal.WALProvider.Writer;
-import org.apache.hadoop.hbase.wal.WALSplitter.CorruptedLogFileException;
+import org.apache.hadoop.hbase.regionserver.wal.FaultySequenceFileLogReader;
+import org.apache.hadoop.hbase.regionserver.wal.InstrumentedLogWriter;
+import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CancelableProgressable;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hadoop.hbase.wal.WAL.Entry;
+import org.apache.hadoop.hbase.wal.WAL.Reader;
+import org.apache.hadoop.hbase.wal.WALProvider.Writer;
+import org.apache.hadoop.hbase.wal.WALSplitter.CorruptedLogFileException;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
 import org.apache.hadoop.ipc.RemoteException;
@@ -82,9 +81,9 @@ import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
-import org.junit.rules.TestName;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
 import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
@@ -92,12 +91,6 @@ import org.mockito.stubbing.Answer;
 import com.google.common.base.Joiner;
 import com.google.common.collect.ImmutableList;
 
-// imports for things that haven't moved from regionserver.wal yet.
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.regionserver.wal.InstrumentedLogWriter;
-import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
-import org.apache.hadoop.hbase.regionserver.wal.FaultySequenceFileLogReader;
-
 /**
  * Testing {@link WAL} splitting code.
  */
@@ -187,7 +180,7 @@
     REGIONS.clear();
     Collections.addAll(REGIONS, "bbb", "ccc");
     InstrumentedLogWriter.activateFailure = false;
-    this.mode = (conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false) ? 
+    this.mode = (conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false) ?
         RecoveryMode.LOG_REPLAY : RecoveryMode.LOG_SPLITTING);
     wals = new WALFactory(conf, null, name.getMethodName());
     WALDIR = new Path(HBASEDIR, DefaultWALProvider.getWALDirectoryName(name.getMethodName()));
@@ -957,7 +950,8 @@
         Cell cell = cells.get(0);
 
         // Check that the edits come in the right order.
-        assertEquals(expectedIndex, Bytes.toInt(cell.getRow()));
+        assertEquals(expectedIndex, Bytes.toInt(cell.getRowArray(), cell.getRowOffset(),
+          cell.getRowLength()));
         expectedIndex++;
         return null;
       }
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb b/hbase-shell/src/main/ruby/hbase/table.rb
index 5d0d8cf4082..15bebcd6620 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -224,7 +224,8 @@ EOF
 
       # Fetch cell value
      cell = result.listCells[0]
-      org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValue)
+      org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValueArray,
+        cell.getValueOffset, cell.getValueLength)
    end

    #----------------------------------------------------------------------------------------------
@@ -371,8 +372,10 @@
      # Print out results. Result can be Cell or RowResult.
      res = {}
      result.listCells.each do |c|
-        family = String.from_java_bytes(c.getFamily)
-        qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifier)
+        family = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getFamilyArray,
+          c.getFamilyOffset, c.getFamilyLength)
+        qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifierArray,
+          c.getQualifierOffset, c.getQualifierLength)
        column = "#{family}:#{qualifier}"
        value = to_string(column, c, maxlength)

@@ -403,7 +406,8 @@

      # Fetch cell value
      cell = result.listCells[0]
-      org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValue)
+      org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValueArray,
+        cell.getValueOffset, cell.getValueLength)
    end

    def _hash_to_scan(args)
@@ -505,8 +509,10 @@
        key = org.apache.hadoop.hbase.util.Bytes::toStringBinary(row.getRow)

        row.listCells.each do |c|
-          family = String.from_java_bytes(c.getFamily)
-          qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifier)
+          family = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getFamilyArray,
+            c.getFamilyOffset, c.getFamilyLength)
+          qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifierArray,
+            c.getQualifierOffset, c.getQualifierLength)
          column = "#{family}:#{qualifier}"
          cell = to_string(column, c, maxlength)

@@ -640,14 +646,17 @@
    def to_string(column, kv, maxlength = -1)
      if is_meta_table?
        if column == 'info:regioninfo' or column == 'info:splitA' or column == 'info:splitB'
-          hri = org.apache.hadoop.hbase.HRegionInfo.parseFromOrNull(kv.getValue)
+          hri = org.apache.hadoop.hbase.HRegionInfo.parseFromOrNull(kv.getValueArray,
+            kv.getValueOffset, kv.getValueLength)
          return "timestamp=%d, value=%s" % [kv.getTimestamp, hri.toString]
        end
        if column == 'info:serverstartcode'
          if kv.getValue.length > 0
-            str_val = org.apache.hadoop.hbase.util.Bytes.toLong(kv.getValue)
+            str_val = org.apache.hadoop.hbase.util.Bytes.toLong(kv.getValueArray,
+              kv.getValueOffset, kv.getValueLength)
          else
-            str_val = org.apache.hadoop.hbase.util.Bytes.toStringBinary(kv.getValue)
+            str_val = org.apache.hadoop.hbase.util.Bytes.toStringBinary(kv.getValueArray,
+              kv.getValueOffset, kv.getValueLength)
          end
          return "timestamp=%d, value=%s" % [kv.getTimestamp, str_val]
        end
@@ -679,7 +688,7 @@
        end
      end
      method = eval(klazz_name).method(converter)
-      return method.call(kv.getValue) # apply the converter
+      return method.call(org.apache.hadoop.hbase.CellUtil.cloneValue(kv)) # apply the converter
    end

    # if the column spec contains CONVERTER information, to get rid of :CONVERTER info from column pair.
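
The hunks above apply two replacement idioms for the removed Cell accessors, and the sketch below shows both side by side. It is a minimal illustration only, assuming the HBase 1.x-era Cell and CellUtil APIs already used in this patch; the helper class and method names are invented for the example and are not part of the change.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;

// Illustrative helper only: demonstrates the two Cell access patterns applied in this patch.
public class CellAccessSketch {

  // Zero-copy comparison against the cell's backing array, replacing the old
  // Bytes.equals(expectedRow, cell.getRow()) form; no intermediate byte[] is allocated.
  static boolean rowEquals(Cell cell, byte[] expectedRow) {
    return Bytes.equals(expectedRow, 0, expectedRow.length,
        cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
  }

  // Copying accessor, replacing cell.getValue(): allocates a fresh byte[],
  // so reserve it for cases where a detached array is genuinely required.
  static byte[] valueCopy(Cell cell) {
    return CellUtil.cloneValue(cell);
  }
}

The zero-copy form is the one used in assertions and other hot paths throughout these hunks, because the deprecated single-argument accessors implied an arraycopy on every call; the cloneXxx() calls remain where an API actually consumes a standalone byte[].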