HBASE-14047 - Cleanup deprecated APIs from Cell class (Ashish Singhi)

ramkrishna 2015-07-13 22:06:16 +05:30
parent 951ec7a0b7
commit a3d30892b4
76 changed files with 752 additions and 1047 deletions

View File

@@ -138,12 +138,6 @@ public class TestPayloadCarryingRpcController {
        return 0;
      }
-     @Override
-     public long getMvccVersion() {
-       // unused
-       return 0;
-     }
      @Override
      public long getSequenceId() {
        // unused
@@ -182,30 +176,6 @@ public class TestPayloadCarryingRpcController {
        // unused
        return null;
      }
-     @Override
-     public byte[] getValue() {
-       // unused
-       return null;
-     }
-     @Override
-     public byte[] getFamily() {
-       // unused
-       return null;
-     }
-     @Override
-     public byte[] getQualifier() {
-       // unused
-       return null;
-     }
-     @Override
-     public byte[] getRow() {
-       // unused
-       return null;
-     }
    };
  }

View File

@@ -44,12 +44,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 * the goal of sorting newer cells first.
 * </p>
 * <p>
- * This interface should not include methods that allocate new byte[]'s such as those used in client
- * or debugging code. These users should use the methods found in the {@link CellUtil} class.
- * Currently for to minimize the impact of existing applications moving between 0.94 and 0.96, we
- * include the costly helper methods marked as deprecated.
- * </p>
- * <p>
 * Cell implements Comparable&lt;Cell&gt; which is only meaningful when
 * comparing to other keys in the
 * same table. It uses CellComparator which does not work on the -ROOT- and hbase:meta tables.
@@ -146,19 +140,7 @@ public interface Cell {
   byte getTypeByte();
-  //6) MvccVersion
-  /**
-   * @deprecated as of 1.0, use {@link Cell#getSequenceId()}
-   *
-   * Internal use only. A region-specific sequence ID given to each operation. It always exists for
-   * cells in the memstore but is not retained forever. It may survive several flushes, but
-   * generally becomes irrelevant after the cell's row is no longer involved in any operations that
-   * require strict consistency.
-   * @return mvccVersion (always &gt;= 0 if exists), or 0 if it no longer exists
-   */
-  @Deprecated
-  long getMvccVersion();
+  //6) SequenceId
   /**
    * A region-specific unique monotonically increasing sequence ID given to each Cell. It always
@@ -202,44 +184,4 @@ public interface Cell {
    * @return the total length of the tags in the Cell.
    */
   int getTagsLength();
-  /**
-   * WARNING do not use, expensive. This gets an arraycopy of the cell's value.
-   *
-   * Added to ease transition from 0.94 -&gt; 0.96.
-   *
-   * @deprecated as of 0.96, use {@link CellUtil#cloneValue(Cell)}
-   */
-  @Deprecated
-  byte[] getValue();
-  /**
-   * WARNING do not use, expensive. This gets an arraycopy of the cell's family.
-   *
-   * Added to ease transition from 0.94 -&gt; 0.96.
-   *
-   * @deprecated as of 0.96, use {@link CellUtil#cloneFamily(Cell)}
-   */
-  @Deprecated
-  byte[] getFamily();
-  /**
-   * WARNING do not use, expensive. This gets an arraycopy of the cell's qualifier.
-   *
-   * Added to ease transition from 0.94 -&gt; 0.96.
-   *
-   * @deprecated as of 0.96, use {@link CellUtil#cloneQualifier(Cell)}
-   */
-  @Deprecated
-  byte[] getQualifier();
-  /**
-   * WARNING do not use, expensive. this gets an arraycopy of the cell's row.
-   *
-   * Added to ease transition from 0.94 -&gt; 0.96.
-   *
-   * @deprecated as of 0.96, use {@link CellUtil#getRowByte(Cell, int)}
-   */
-  @Deprecated
-  byte[] getRow();
 }
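
The removal above tightens the Cell contract: every remaining accessor is either a primitive or an array/offset/length triple, and all copying is pushed to CellUtil. Client code that used the deprecated getters migrates as in this minimal sketch (the caller class is hypothetical; only the CellUtil helpers named in the deprecation notes are assumed):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;

public class CellAccessMigration {
  // Each removed Cell getter maps onto a CellUtil clone helper that makes
  // the arraycopy explicit at the call site.
  static byte[][] copyFields(Cell cell) {
    byte[] row = CellUtil.cloneRow(cell);             // was cell.getRow()
    byte[] family = CellUtil.cloneFamily(cell);       // was cell.getFamily()
    byte[] qualifier = CellUtil.cloneQualifier(cell); // was cell.getQualifier()
    byte[] value = CellUtil.cloneValue(cell);         // was cell.getValue()
    return new byte[][] { row, family, qualifier, value };
  }
}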

View File

@@ -1335,11 +1335,6 @@ public final class CellUtil {
      return 0;
    }
-    @Override
-    public long getMvccVersion() {
-      return getSequenceId();
-    }
    @Override
    public long getSequenceId() {
      return 0;
@@ -1374,26 +1369,6 @@ public final class CellUtil {
    public int getTagsLength() {
      return 0;
    }
-    @Override
-    public byte[] getValue() {
-      return EMPTY_BYTE_ARRAY;
-    }
-    @Override
-    public byte[] getFamily() {
-      return EMPTY_BYTE_ARRAY;
-    }
-    @Override
-    public byte[] getQualifier() {
-      return EMPTY_BYTE_ARRAY;
-    }
-    @Override
-    public byte[] getRow() {
-      return EMPTY_BYTE_ARRAY;
-    }
  }
  @InterfaceAudience.Private
@@ -1432,11 +1407,6 @@ public final class CellUtil {
    public byte getTypeByte() {
      return Type.Maximum.getCode();
    }
-    @Override
-    public byte[] getRow() {
-      return Bytes.copy(this.rowArray, this.roffset, this.rlength);
-    }
  }
  @InterfaceAudience.Private
@@ -1488,16 +1458,6 @@ public final class CellUtil {
    public int getQualifierLength() {
      return this.qlength;
    }
-    @Override
-    public byte[] getFamily() {
-      return Bytes.copy(this.fArray, this.foffset, this.flength);
-    }
-    @Override
-    public byte[] getQualifier() {
-      return Bytes.copy(this.qArray, this.qoffset, this.qlength);
-    }
  }
  @InterfaceAudience.Private
@@ -1553,11 +1513,6 @@ public final class CellUtil {
    public byte getTypeByte() {
      return Type.Minimum.getCode();
    }
-    @Override
-    public byte[] getRow() {
-      return Bytes.copy(this.rowArray, this.roffset, this.rlength);
-    }
  }
  @InterfaceAudience.Private
@@ -1609,15 +1564,5 @@ public final class CellUtil {
    public int getQualifierLength() {
      return this.qlength;
    }
-    @Override
-    public byte[] getFamily() {
-      return Bytes.copy(this.fArray, this.foffset, this.flength);
-    }
-    @Override
-    public byte[] getQualifier() {
-      return Bytes.copy(this.qArray, this.qoffset, this.qlength);
-    }
  }
 }

View File

@@ -50,12 +50,13 @@ import com.google.common.annotations.VisibleForTesting;
 /**
  * An HBase Key/Value. This is the fundamental HBase Type.
  * <p>
- * HBase applications and users should use the Cell interface and avoid directly using KeyValue
- * and member functions not defined in Cell.
+ * HBase applications and users should use the Cell interface and avoid directly using KeyValue and
+ * member functions not defined in Cell.
  * <p>
- * If being used client-side, the primary methods to access individual fields are {@link #getRow()},
- * {@link #getFamily()}, {@link #getQualifier()}, {@link #getTimestamp()}, and {@link #getValue()}.
- * These methods allocate new byte arrays and return copies. Avoid their use server-side.
+ * If being used client-side, the primary methods to access individual fields are
+ * {@link #getRowArray()}, {@link #getFamilyArray()}, {@link #getQualifierArray()},
+ * {@link #getTimestamp()}, and {@link #getValueArray()}. These methods allocate new byte arrays
+ * and return copies. Avoid their use server-side.
  * <p>
  * Instances of this class are immutable. They do not implement Comparable but Comparators are
  * provided. Comparators change with context, whether user table or a catalog table comparison. Its
@@ -64,23 +65,20 @@ import com.google.common.annotations.VisibleForTesting;
  * <p>
  * KeyValue wraps a byte array and takes offsets and lengths into passed array at where to start
  * interpreting the content as KeyValue. The KeyValue format inside a byte array is:
- * <code>&lt;keylength&gt; &lt;valuelength&gt; &lt;key&gt; &lt;value&gt;</code>
- * Key is further decomposed as:
- * <code>&lt;rowlength&gt; &lt;row&gt; &lt;columnfamilylength&gt;
+ * <code>&lt;keylength&gt; &lt;valuelength&gt; &lt;key&gt; &lt;value&gt;</code> Key is further
+ * decomposed as: <code>&lt;rowlength&gt; &lt;row&gt; &lt;columnfamilylength&gt;
  * &lt;columnfamily&gt; &lt;columnqualifier&gt;
- * &lt;timestamp&gt; &lt;keytype&gt;</code>
- * The <code>rowlength</code> maximum is <code>Short.MAX_SIZE</code>, column family length maximum
- * is <code>Byte.MAX_SIZE</code>, and column qualifier + key length must be &lt;
- * <code>Integer.MAX_SIZE</code>. The column does not contain the family/qualifier delimiter,
- * {@link #COLUMN_FAMILY_DELIMITER}<br>
+ * &lt;timestamp&gt; &lt;keytype&gt;</code> The <code>rowlength</code> maximum is
+ * <code>Short.MAX_SIZE</code>, column family length maximum is <code>Byte.MAX_SIZE</code>, and
+ * column qualifier + key length must be &lt; <code>Integer.MAX_SIZE</code>. The column does not
+ * contain the family/qualifier delimiter, {@link #COLUMN_FAMILY_DELIMITER}<br>
  * KeyValue can optionally contain Tags. When it contains tags, it is added in the byte array after
  * the value part. The format for this part is: <code>&lt;tagslength&gt;&lt;tagsbytes&gt;</code>.
  * <code>tagslength</code> maximum is <code>Short.MAX_SIZE</code>. The <code>tagsbytes</code>
  * contain one or more tags where as each tag is of the form
- * <code>&lt;taglength&gt;&lt;tagtype&gt;&lt;tagbytes&gt;</code>.
- * <code>tagtype</code> is one byte and
- * <code>taglength</code> maximum is <code>Short.MAX_SIZE</code> and it includes 1 byte type length
- * and actual tag bytes length.
+ * <code>&lt;taglength&gt;&lt;tagtype&gt;&lt;tagbytes&gt;</code>. <code>tagtype</code> is one byte
+ * and <code>taglength</code> maximum is <code>Short.MAX_SIZE</code> and it includes 1 byte type
+ * length and actual tag bytes length.
  */
 @InterfaceAudience.Private
 public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
@@ -296,12 +294,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
   /** Here be dragons **/
-  // used to achieve atomic operations in the memstore.
-  @Override
-  public long getMvccVersion() {
-    return this.getSequenceId();
-  }
   /**
    * used to achieve atomic operations in the memstore.
    */
@@ -1172,9 +1164,11 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
    */
   public Map<String, Object> toStringMap() {
     Map<String, Object> stringMap = new HashMap<String, Object>();
-    stringMap.put("row", Bytes.toStringBinary(getRow()));
-    stringMap.put("family", Bytes.toStringBinary(getFamily()));
-    stringMap.put("qualifier", Bytes.toStringBinary(getQualifier()));
+    stringMap.put("row", Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength()));
+    stringMap.put("family",
+        Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength()));
+    stringMap.put("qualifier",
+        Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(), getQualifierLength()));
     stringMap.put("timestamp", getTimestamp());
     stringMap.put("vlen", getValueLength());
     List<Tag> tags = getTags();
@@ -1472,10 +1466,9 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
   //---------------------------------------------------------------------------
   /**
-   * Do not use unless you have to. Used internally for compacting and testing.
-   *
-   * Use {@link #getRow()}, {@link #getFamily()}, {@link #getQualifier()}, and
-   * {@link #getValue()} if accessing a KeyValue client-side.
+   * Do not use unless you have to. Used internally for compacting and testing. Use
+   * {@link #getRowArray()}, {@link #getFamilyArray()}, {@link #getQualifierArray()}, and
+   * {@link #getValueArray()} if accessing a KeyValue client-side.
    * @return Copy of the key portion only.
    */
   public byte [] getKey() {
@@ -1485,33 +1478,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
     return key;
   }
-  /**
-   * Returns value in a new byte array.
-   * Primarily for use client-side. If server-side, use
-   * {@link #getBuffer()} with appropriate offsets and lengths instead to
-   * save on allocations.
-   * @return Value in a new byte array.
-   */
-  @Override
-  @Deprecated // use CellUtil.getValueArray()
-  public byte [] getValue() {
-    return CellUtil.cloneValue(this);
-  }
-  /**
-   * Primarily for use client-side. Returns the row of this KeyValue in a new
-   * byte array.<p>
-   *
-   * If server-side, use {@link #getBuffer()} with appropriate offsets and
-   * lengths instead.
-   * @return Row in a new byte array.
-   */
-  @Override
-  @Deprecated // use CellUtil.getRowArray()
-  public byte [] getRow() {
-    return CellUtil.cloneRow(this);
-  }
   /**
    *
    * @return Timestamp
@@ -1556,35 +1522,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
     return KeyValue.isDelete(getType());
   }
-  /**
-   * Primarily for use client-side. Returns the family of this KeyValue in a
-   * new byte array.<p>
-   *
-   * If server-side, use {@link #getBuffer()} with appropriate offsets and
-   * lengths instead.
-   * @return Returns family. Makes a copy.
-   */
-  @Override
-  @Deprecated // use CellUtil.getFamilyArray
-  public byte [] getFamily() {
-    return CellUtil.cloneFamily(this);
-  }
-  /**
-   * Primarily for use client-side. Returns the column qualifier of this
-   * KeyValue in a new byte array.<p>
-   *
-   * If server-side, use {@link #getBuffer()} with appropriate offsets and
-   * lengths instead.
-   * Use {@link #getBuffer()} with appropriate offsets and lengths instead.
-   * @return Returns qualifier. Makes a copy.
-   */
-  @Override
-  @Deprecated // use CellUtil.getQualifierArray
-  public byte [] getQualifier() {
-    return CellUtil.cloneQualifier(this);
-  }
   /**
    * This returns the offset where the tag actually starts.
    */
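
The toStringMap() rewrite above shows the allocation-free alternative to cloning: Bytes.toStringBinary reads the backing array in place when given an offset and length. A minimal sketch of that pattern (the helper class and method names are illustrative):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.util.Bytes;

final class CellRendering {
  // Renders a field without the intermediate byte[] that cloneRow/cloneValue
  // would allocate; this is the server-side-friendly form used in toStringMap.
  static String rowAsString(Cell c) {
    return Bytes.toStringBinary(c.getRowArray(), c.getRowOffset(), c.getRowLength());
  }
  static String valueAsString(Cell c) {
    return Bytes.toStringBinary(c.getValueArray(), c.getValueOffset(), c.getValueLength());
  }
}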

View File

@@ -161,15 +161,16 @@ public class KeyValueTestUtil {
  }
  protected static String getRowString(final KeyValue kv) {
-    return Bytes.toStringBinary(kv.getRow());
+    return Bytes.toStringBinary(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength());
  }
  protected static String getFamilyString(final KeyValue kv) {
-    return Bytes.toStringBinary(kv.getFamily());
+    return Bytes.toStringBinary(kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength());
  }
  protected static String getQualifierString(final KeyValue kv) {
-    return Bytes.toStringBinary(kv.getQualifier());
+    return Bytes.toStringBinary(kv.getQualifierArray(), kv.getQualifierOffset(),
+        kv.getQualifierLength());
  }
  protected static String getTimestampString(final KeyValue kv) {
@@ -177,11 +178,11 @@ public class KeyValueTestUtil {
  }
  protected static String getTypeString(final KeyValue kv) {
-    return KeyValue.Type.codeToType(kv.getType()).toString();
+    return KeyValue.Type.codeToType(kv.getTypeByte()).toString();
  }
  protected static String getValueString(final KeyValue kv) {
-    return Bytes.toStringBinary(kv.getValue());
+    return Bytes.toStringBinary(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength());
  }
 }

View File

@@ -81,7 +81,7 @@ public class KeyValueUtil {
  public static int lengthWithMvccVersion(final KeyValue kv, final boolean includeMvccVersion) {
    int length = kv.getLength();
    if (includeMvccVersion) {
-      length += WritableUtils.getVIntSize(kv.getMvccVersion());
+      length += WritableUtils.getVIntSize(kv.getSequenceId());
    }
    return length;
  }
@@ -101,7 +101,7 @@ public class KeyValueUtil {
  public static KeyValue copyToNewKeyValue(final Cell cell) {
    byte[] bytes = copyToNewByteArray(cell);
    KeyValue kvCell = new KeyValue(bytes, 0, bytes.length);
-    kvCell.setSequenceId(cell.getMvccVersion());
+    kvCell.setSequenceId(cell.getSequenceId());
    return kvCell;
  }
@@ -173,9 +173,9 @@ public class KeyValueUtil {
    bb.limit(bb.position() + kv.getLength());
    bb.put(kv.getBuffer(), kv.getOffset(), kv.getLength());
    if (includeMvccVersion) {
-      int numMvccVersionBytes = WritableUtils.getVIntSize(kv.getMvccVersion());
+      int numMvccVersionBytes = WritableUtils.getVIntSize(kv.getSequenceId());
      ByteBufferUtils.extendLimit(bb, numMvccVersionBytes);
-      ByteBufferUtils.writeVLong(bb, kv.getMvccVersion());
+      ByteBufferUtils.writeVLong(bb, kv.getSequenceId());
    }
  }
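
The hunks above size and append the sequence id as a Hadoop variable-length long, so getVIntSize must agree with what writeVLong emits. A self-contained round-trip sketch using only the Hadoop WritableUtils calls seen here (the wrapper class is hypothetical):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.WritableUtils;

final class SeqIdVLong {
  // Encode a sequence id as a vlong; the buffer is pre-sized with the same
  // getVIntSize call the KeyValueUtil hunks use for length accounting.
  static byte[] encode(long seqId) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream(WritableUtils.getVIntSize(seqId));
    WritableUtils.writeVLong(new DataOutputStream(bos), seqId);
    return bos.toByteArray();
  }
  static long decode(byte[] encoded) throws IOException {
    return WritableUtils.readVLong(new DataInputStream(new ByteArrayInputStream(encoded)));
  }
}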

View File

@@ -56,7 +56,7 @@ public class CellCodec implements Codec {
      // Value
      write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
      // MvccVersion
-      this.out.write(Bytes.toBytes(cell.getMvccVersion()));
+      this.out.write(Bytes.toBytes(cell.getSequenceId()));
    }
    /**

View File

@@ -58,7 +58,7 @@ public class CellCodecWithTags implements Codec {
      // Tags
      write(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
      // MvccVersion
-      this.out.write(Bytes.toBytes(cell.getMvccVersion()));
+      this.out.write(Bytes.toBytes(cell.getSequenceId()));
    }
    /**

View File

@@ -24,13 +24,12 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.Streamable;
 import org.apache.hadoop.hbase.SettableSequenceId;
+import org.apache.hadoop.hbase.Streamable;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.ByteBufferOutputStream;
 import org.apache.hadoop.hbase.io.HeapSize;
@@ -256,11 +255,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
      return currentKey.getTypeByte();
    }
-    @Override
-    public long getMvccVersion() {
-      return memstoreTS;
-    }
    @Override
    public long getSequenceId() {
      return memstoreTS;
@@ -302,30 +296,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
      return tagsLength;
    }
-    @Override
-    @Deprecated
-    public byte[] getValue() {
-      throw new UnsupportedOperationException("getValue() not supported");
-    }
-    @Override
-    @Deprecated
-    public byte[] getFamily() {
-      throw new UnsupportedOperationException("getFamily() not supported");
-    }
-    @Override
-    @Deprecated
-    public byte[] getQualifier() {
-      throw new UnsupportedOperationException("getQualifier() not supported");
-    }
-    @Override
-    @Deprecated
-    public byte[] getRow() {
-      throw new UnsupportedOperationException("getRow() not supported");
-    }
    @Override
    public String toString() {
      return KeyValue.keyToString(this.keyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen="
@@ -455,12 +425,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
      return typeByte;
    }
-    @Override
-    @Deprecated
-    public long getMvccVersion() {
-      return getSequenceId();
-    }
    @Override
    public long getSequenceId() {
      return seqId;
@@ -502,30 +466,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
      return tagsLength;
    }
-    @Override
-    @Deprecated
-    public byte[] getValue() {
-      return CellUtil.cloneValue(this);
-    }
-    @Override
-    @Deprecated
-    public byte[] getFamily() {
-      return CellUtil.cloneFamily(this);
-    }
-    @Override
-    @Deprecated
-    public byte[] getQualifier() {
-      return CellUtil.cloneQualifier(this);
-    }
-    @Override
-    @Deprecated
-    public byte[] getRow() {
-      return CellUtil.cloneRow(this);
-    }
    @Override
    public String toString() {
      return KeyValue.keyToString(this.keyOnlyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen="

View File

@@ -301,7 +301,7 @@ public class RedundantKVGenerator {
    for (KeyValue kv : keyValues) {
      totalSize += kv.getLength();
      if (includesMemstoreTS) {
-        totalSize += WritableUtils.getVIntSize(kv.getMvccVersion());
+        totalSize += WritableUtils.getVIntSize(kv.getSequenceId());
      }
    }
@@ -309,7 +309,7 @@ public class RedundantKVGenerator {
    for (KeyValue kv : keyValues) {
      result.put(kv.getBuffer(), kv.getOffset(), kv.getLength());
      if (includesMemstoreTS) {
-        ByteBufferUtils.writeVLong(result, kv.getMvccVersion());
+        ByteBufferUtils.writeVLong(result, kv.getSequenceId());
      }
    }
    return result;

View File

@@ -18,7 +18,9 @@
 package org.apache.hadoop.hbase;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 import java.io.IOException;
 import java.math.BigDecimal;
@@ -154,12 +156,6 @@ public class TestCellUtil {
      return 0;
    }
-    @Override
-    public long getMvccVersion() {
-      // TODO Auto-generated method stub
-      return 0;
-    }
    @Override
    public byte[] getValueArray() {
      // TODO Auto-generated method stub
@@ -190,30 +186,6 @@ public class TestCellUtil {
      return 0;
    }
-    @Override
-    public byte[] getValue() {
-      // TODO Auto-generated method stub
-      return null;
-    }
-    @Override
-    public byte[] getFamily() {
-      // TODO Auto-generated method stub
-      return null;
-    }
-    @Override
-    public byte[] getQualifier() {
-      // TODO Auto-generated method stub
-      return null;
-    }
-    @Override
-    public byte[] getRow() {
-      // TODO Auto-generated method stub
-      return null;
-    }
    @Override
    public long getSequenceId() {
      // TODO Auto-generated method stub
@@ -592,11 +564,6 @@ public class TestCellUtil {
      return KeyValue.Type.Put.getCode();
    }
-    @Override
-    public long getMvccVersion() {
-      return 0;
-    }
    @Override
    public long getSequenceId() {
      return 0;
@@ -639,26 +606,6 @@ public class TestCellUtil {
      return tagsLen;
    }
-    @Override
-    public byte[] getValue() {
-      throw new UnsupportedOperationException();
-    }
-    @Override
-    public byte[] getFamily() {
-      throw new UnsupportedOperationException();
-    }
-    @Override
-    public byte[] getQualifier() {
-      throw new UnsupportedOperationException();
-    }
-    @Override
-    public byte[] getRow() {
-      throw new UnsupportedOperationException();
-    }
    @Override
    public ByteBuffer getRowByteBuffer() {
      return this.buffer;

View File

@@ -93,7 +93,8 @@ public class TestKeyValue extends TestCase {
  private void check(final byte [] row, final byte [] family, byte [] qualifier,
    final long timestamp, final byte [] value) {
    KeyValue kv = new KeyValue(row, family, qualifier, timestamp, value);
-    assertTrue(Bytes.compareTo(kv.getRow(), row) == 0);
+    assertTrue(Bytes.compareTo(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), row, 0,
+      row.length) == 0);
    assertTrue(CellUtil.matchingColumn(kv, family, qualifier));
    // Call toString to make sure it works.
    LOG.info(kv.toString());
@@ -390,9 +391,10 @@ public class TestKeyValue extends TestCase {
      // keys are still the same
      assertTrue(kv1.equals(kv1ko));
      // but values are not
-      assertTrue(kv1ko.getValue().length == (useLen?Bytes.SIZEOF_INT:0));
+      assertTrue(kv1ko.getValueLength() == (useLen?Bytes.SIZEOF_INT:0));
      if (useLen) {
-        assertEquals(kv1.getValueLength(), Bytes.toInt(kv1ko.getValue()));
+        assertEquals(kv1.getValueLength(),
+          Bytes.toInt(kv1ko.getValueArray(), kv1ko.getValueOffset(), kv1ko.getValueLength()));
      }
    }
  }
@@ -442,10 +444,14 @@ public class TestKeyValue extends TestCase {
    KeyValue kv = new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, new Tag[] {
      new Tag((byte) 1, metaValue1), new Tag((byte) 2, metaValue2) });
    assertTrue(kv.getTagsLength() > 0);
-    assertTrue(Bytes.equals(kv.getRow(), row));
-    assertTrue(Bytes.equals(kv.getFamily(), cf));
-    assertTrue(Bytes.equals(kv.getQualifier(), q));
-    assertTrue(Bytes.equals(kv.getValue(), value));
+    assertTrue(Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), row, 0,
+      row.length));
+    assertTrue(Bytes.equals(kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength(), cf, 0,
+      cf.length));
+    assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+      kv.getQualifierLength(), q, 0, q.length));
+    assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value, 0,
+      value.length));
    List<Tag> tags = kv.getTags();
    assertNotNull(tags);
    assertEquals(2, tags.size());
@@ -590,12 +596,6 @@ public class TestKeyValue extends TestCase {
      return this.kv.getTagsOffset();
    }
-    // used to achieve atomic operations in the memstore.
-    @Override
-    public long getMvccVersion() {
-      return this.kv.getMvccVersion();
-    }
    /**
     * used to achieve atomic operations in the memstore.
     */
@@ -729,34 +729,6 @@ public class TestKeyValue extends TestCase {
      return this.kv.getQualifierLength();
    }
-    @Override
-    @Deprecated
-    public byte[] getValue() {
-      // TODO Auto-generated method stub
-      return null;
-    }
-    @Override
-    @Deprecated
-    public byte[] getFamily() {
-      // TODO Auto-generated method stub
-      return null;
-    }
-    @Override
-    @Deprecated
-    public byte[] getQualifier() {
-      // TODO Auto-generated method stub
-      return null;
-    }
-    @Override
-    @Deprecated
-    public byte[] getRow() {
-      // TODO Auto-generated method stub
-      return null;
-    }
    /**
     * @return the backing array of the entire KeyValue (all KeyValue fields are
     * in a single array)

View File

@@ -40,7 +40,7 @@ public class TestByteRangeWithKVSerialization {
    pbr.put((byte) (tagsLen >> 8 & 0xff));
    pbr.put((byte) (tagsLen & 0xff));
    pbr.put(kv.getTagsArray(), kv.getTagsOffset(), tagsLen);
-    pbr.putVLong(kv.getMvccVersion());
+    pbr.putVLong(kv.getSequenceId());
  }
  static KeyValue readCell(PositionedByteRange pbr) throws Exception {
@@ -88,7 +88,7 @@ public class TestByteRangeWithKVSerialization {
      Assert.assertTrue(Bytes.equals(kv.getTagsArray(), kv.getTagsOffset(),
        kv.getTagsLength(), kv1.getTagsArray(), kv1.getTagsOffset(),
        kv1.getTagsLength()));
-      Assert.assertEquals(kv1.getMvccVersion(), kv.getMvccVersion());
+      Assert.assertEquals(kv1.getSequenceId(), kv.getSequenceId());
    }
  }
 }
} }

View File

@@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionLocation;
@@ -343,7 +344,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
          delete = new Delete(key.get());
        }
        delete.setCellVisibility(new CellVisibility(visibilityExps));
-        delete.deleteFamily(kv.getFamily());
+        delete.deleteFamily(CellUtil.cloneFamily(kv));
      }
      if (delete != null) {
        context.write(key, delete);

View File

@@ -102,7 +102,7 @@ public class PrefixTreeCodec implements DataBlockEncoder {
      ByteBufferUtils.skip(result, keyValueLength);
      offset += keyValueLength;
      if (includesMvcc) {
-        ByteBufferUtils.writeVLong(result, currentCell.getMvccVersion());
+        ByteBufferUtils.writeVLong(result, currentCell.getSequenceId());
      }
    }
    result.position(result.limit());//make it appear as if we were appending

View File

@@ -314,12 +314,6 @@ public class PrefixTreeSeeker implements EncodedSeeker {
      return type;
    }
-    @Override
-    @Deprecated
-    public long getMvccVersion() {
-      return getSequenceId();
-    }
    @Override
    public long getSequenceId() {
      return seqId;
@@ -355,30 +349,6 @@ public class PrefixTreeSeeker implements EncodedSeeker {
      return this.tagsLength;
    }
-    @Override
-    @Deprecated
-    public byte[] getValue() {
-      return this.val;
-    }
-    @Override
-    @Deprecated
-    public byte[] getFamily() {
-      return this.fam;
-    }
-    @Override
-    @Deprecated
-    public byte[] getQualifier() {
-      return this.qual;
-    }
-    @Override
-    @Deprecated
-    public byte[] getRow() {
-      return this.row;
-    }
    @Override
    public String toString() {
      String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());

View File

@@ -131,18 +131,13 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell
  }
  @Override
-  public long getMvccVersion() {
+  public long getSequenceId() {
    if (!includeMvccVersion) {
      return 0L;
    }
    return mvccVersion;
  }
-  @Override
-  public long getSequenceId() {
-    return getMvccVersion();
-  }
  @Override
  public int getValueLength() {
    return valueLength;
@@ -208,27 +203,6 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell
    return type.getCode();
  }
-  /* Deprecated methods pushed into the Cell interface */
-  @Override
-  public byte[] getValue() {
-    return CellUtil.cloneValue(this);
-  }
-  @Override
-  public byte[] getFamily() {
-    return CellUtil.cloneFamily(this);
-  }
-  @Override
-  public byte[] getQualifier() {
-    return CellUtil.cloneQualifier(this);
-  }
-  @Override
-  public byte[] getRow() {
-    return CellUtil.cloneRow(this);
-  }
  /************************* helper methods *************************/
  /**

View File

@@ -296,9 +296,9 @@ public class PrefixTreeEncoder implements CellOutputStream {
    // memstore timestamps
    if (includeMvccVersion) {
-      mvccVersions[totalCells] = cell.getMvccVersion();
-      mvccVersionEncoder.add(cell.getMvccVersion());
-      totalUnencodedBytes += WritableUtils.getVIntSize(cell.getMvccVersion());
+      mvccVersions[totalCells] = cell.getSequenceId();
+      mvccVersionEncoder.add(cell.getSequenceId());
+      totalUnencodedBytes += WritableUtils.getVIntSize(cell.getSequenceId());
    }else{
      //must overwrite in case there was a previous version in this array slot
      mvccVersions[totalCells] = 0L;

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.codec.prefixtree.column.data;
 import java.util.List;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.codec.prefixtree.column.TestColumnData;
 import org.apache.hadoop.hbase.util.ByteRange;
@@ -40,7 +41,7 @@ public class TestColumnDataRandom implements TestColumnData {
    ByteRangeSet sortedColumns = new ByteRangeTreeSet();
    List<KeyValue> d = generator.generateTestKeyValues(numColumns);
    for (KeyValue col : d) {
-      ByteRange colRange = new SimpleMutableByteRange(col.getQualifier());
+      ByteRange colRange = new SimpleMutableByteRange(CellUtil.cloneQualifier(col));
      inputs.add(colRange);
      sortedColumns.add(colRange);
    }

View File

@@ -181,7 +181,7 @@ public class TestRowEncoder {
    // assert keys are equal (doesn't compare values)
    Assert.assertEquals(expected, actual);
    if (includeMemstoreTS) {
-      Assert.assertEquals(expected.getMvccVersion(), actual.getMvccVersion());
+      Assert.assertEquals(expected.getSequenceId(), actual.getSequenceId());
    }
    // assert values equal
    Assert.assertTrue(Bytes.equals(expected.getValueArray(), expected.getValueOffset(),

View File

@@ -115,7 +115,8 @@ public class RemoteHTable implements Table {
      if (o instanceof byte[]) {
        sb.append(Bytes.toStringBinary((byte[])o));
      } else if (o instanceof KeyValue) {
-        sb.append(Bytes.toStringBinary(((KeyValue)o).getQualifier()));
+        sb.append(Bytes.toStringBinary(((KeyValue) o).getRowArray(),
+          ((KeyValue) o).getRowOffset(), ((KeyValue) o).getRowLength()));
      } else {
        throw new RuntimeException("object type not handled");
      }

View File

@@ -19,6 +19,10 @@
 package org.apache.hadoop.hbase.rest;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 import java.io.ByteArrayInputStream;
 import java.io.StringWriter;
 import java.util.ArrayList;
@@ -32,16 +36,24 @@ import javax.xml.bind.Unmarshaller;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
 import org.apache.hadoop.hbase.filter.PageFilter;
@@ -52,8 +64,6 @@ import org.apache.hadoop.hbase.filter.RowFilter;
 import org.apache.hadoop.hbase.filter.SkipFilter;
 import org.apache.hadoop.hbase.filter.SubstringComparator;
 import org.apache.hadoop.hbase.filter.ValueFilter;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.filter.FilterList.Operator;
 import org.apache.hadoop.hbase.rest.client.Client;
 import org.apache.hadoop.hbase.rest.client.Cluster;
 import org.apache.hadoop.hbase.rest.client.Response;
@@ -64,9 +74,6 @@ import org.apache.hadoop.hbase.rest.model.ScannerModel;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import static org.junit.Assert.*;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -286,14 +293,14 @@ public class TestScannersWithFilters {
        kvs.length >= idx + cells.size());
      for (CellModel cell: cells) {
        assertTrue("Row mismatch",
-          Bytes.equals(rowModel.getKey(), kvs[idx].getRow()));
+          Bytes.equals(rowModel.getKey(), CellUtil.cloneRow(kvs[idx])));
        byte[][] split = KeyValue.parseColumn(cell.getColumn());
        assertTrue("Family mismatch",
-          Bytes.equals(split[0], kvs[idx].getFamily()));
+          Bytes.equals(split[0], CellUtil.cloneFamily(kvs[idx])));
        assertTrue("Qualifier mismatch",
-          Bytes.equals(split[1], kvs[idx].getQualifier()));
+          Bytes.equals(split[1], CellUtil.cloneQualifier(kvs[idx])));
        assertTrue("Value mismatch",
-          Bytes.equals(cell.getValue(), kvs[idx].getValue()));
+          Bytes.equals(cell.getValue(), CellUtil.cloneValue(kvs[idx])));
        idx++;
      }
    }

View File

@@ -106,12 +106,6 @@ public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimesta
    return cell.getTypeByte();
  }
-  @Override
-  @Deprecated
-  public long getMvccVersion() {
-    return getSequenceId();
-  }
  @Override
  public long getSequenceId() {
    return cell.getSequenceId();
@@ -151,30 +145,6 @@ public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimesta
    return this.tags.length;
  }
-  @Override
-  @Deprecated
-  public byte[] getValue() {
-    return cell.getValue();
-  }
-  @Override
-  @Deprecated
-  public byte[] getFamily() {
-    return cell.getFamily();
-  }
-  @Override
-  @Deprecated
-  public byte[] getQualifier() {
-    return cell.getQualifier();
-  }
-  @Override
-  @Deprecated
-  public byte[] getRow() {
-    return cell.getRow();
-  }
  @Override
  public long heapSize() {
    long sum = CellUtil.estimatedHeapSizeOf(cell) - cell.getTagsLength();

View File

@@ -17,6 +17,12 @@
 */
 package org.apache.hadoop.hbase.mapreduce;
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Map;
+import java.util.TreeMap;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -48,12 +54,6 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import java.io.IOException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Map;
-import java.util.TreeMap;
 /**
 * A tool to replay WAL files as a M/R job.
 * The WAL can be replayed for a set of tables or all tables,
@@ -106,8 +106,8 @@ public class WALPlayer extends Configured implements Tool {
        if (Bytes.equals(table, key.getTablename().getName())) {
          for (Cell cell : value.getCells()) {
            KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-            if (WALEdit.isMetaEditFamily(kv.getFamily())) continue;
-            context.write(new ImmutableBytesWritable(kv.getRow()), kv);
+            if (WALEdit.isMetaEditFamily(kv)) continue;
+            context.write(new ImmutableBytesWritable(CellUtil.cloneRow(kv)), kv);
          }
        }
      } catch (InterruptedException e) {
@@ -149,7 +149,7 @@ public class WALPlayer extends Configured implements Tool {
          Cell lastCell = null;
          for (Cell cell : value.getCells()) {
            // filtering WAL meta entries
-            if (WALEdit.isMetaEditFamily(cell.getFamily())) continue;
+            if (WALEdit.isMetaEditFamily(cell)) continue;
            // Allow a subclass filter out this cell.
            if (filter(context, cell)) {
@@ -163,9 +163,9 @@ public class WALPlayer extends Configured implements Tool {
              if (put != null) context.write(tableOut, put);
              if (del != null) context.write(tableOut, del);
              if (CellUtil.isDelete(cell)) {
-                del = new Delete(cell.getRow());
+                del = new Delete(CellUtil.cloneRow(cell));
              } else {
-                put = new Put(cell.getRow());
+                put = new Put(CellUtil.cloneRow(cell));
              }
            }
            if (CellUtil.isDelete(cell)) {
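
The replay loop above groups WAL cells into a Put or a Delete per row, with CellUtil.cloneRow replacing the removed Cell#getRow(). A condensed, self-contained sketch of that pattern (the class is hypothetical; the addColumn/addFamily calls are the 1.x-era client API and stand in for WALPlayer's internal plumbing):

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;

final class ReplaySketch {
  static void replayRow(List<Cell> rowCells) throws IOException {
    Put put = null;
    Delete del = null;
    for (Cell cell : rowCells) {
      if (CellUtil.isDelete(cell)) {
        if (del == null) del = new Delete(CellUtil.cloneRow(cell));
        del.addFamily(CellUtil.cloneFamily(cell));   // simplified: whole family
      } else {
        if (put == null) put = new Put(CellUtil.cloneRow(cell));
        put.addColumn(CellUtil.cloneFamily(cell), CellUtil.cloneQualifier(cell),
            cell.getTimestamp(), CellUtil.cloneValue(cell));
      }
    }
    // put / del would then be written to the output context, as in the hunk above
  }
}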

View File

@@ -329,7 +329,7 @@ public class ScanQueryMatcher {
     * they affect
     */
    byte typeByte = cell.getTypeByte();
-    long mvccVersion = cell.getMvccVersion();
+    long mvccVersion = cell.getSequenceId();
    if (CellUtil.isDelete(cell)) {
      if (keepDeletedCells == KeepDeletedCells.FALSE
          || (keepDeletedCells == KeepDeletedCells.TTL && timestamp < ttl)) {

View File

@@ -34,6 +34,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KeyOnlyKeyValue;
@@ -168,7 +169,7 @@ public class StripeStoreFileManager
    // Order matters for this call.
    result.addSublist(state.level0Files);
    if (!state.stripeFiles.isEmpty()) {
-      int lastStripeIndex = findStripeForRow(targetKey.getRow(), false);
+      int lastStripeIndex = findStripeForRow(CellUtil.cloneRow(targetKey), false);
      for (int stripeIndex = lastStripeIndex; stripeIndex >= 0; --stripeIndex) {
        result.addSublist(state.stripeFiles.get(stripeIndex));
      }

View File

@@ -288,7 +288,7 @@ public class WALEdit implements Writable, HeapSize {
  public static FlushDescriptor getFlushDescriptor(Cell cell) throws IOException {
    if (CellUtil.matchingColumn(cell, METAFAMILY, FLUSH)) {
-      return FlushDescriptor.parseFrom(cell.getValue());
+      return FlushDescriptor.parseFrom(CellUtil.cloneValue(cell));
    }
    return null;
  }
@@ -302,7 +302,7 @@ public class WALEdit implements Writable, HeapSize {
  public static RegionEventDescriptor getRegionEventDescriptor(Cell cell) throws IOException {
    if (CellUtil.matchingColumn(cell, METAFAMILY, REGION_EVENT)) {
-      return RegionEventDescriptor.parseFrom(cell.getValue());
+      return RegionEventDescriptor.parseFrom(CellUtil.cloneValue(cell));
    }
    return null;
  }
@@ -336,7 +336,7 @@ public class WALEdit implements Writable, HeapSize {
   */
  public static CompactionDescriptor getCompaction(Cell kv) throws IOException {
    if (CellUtil.matchingColumn(kv, METAFAMILY, COMPACTION)) {
-      return CompactionDescriptor.parseFrom(kv.getValue());
+      return CompactionDescriptor.parseFrom(CellUtil.cloneValue(kv));
    }
    return null;
  }
@@ -365,7 +365,7 @@ public class WALEdit implements Writable, HeapSize {
   */
  public static WALProtos.BulkLoadDescriptor getBulkLoadDescriptor(Cell cell) throws IOException {
    if (CellUtil.matchingColumn(cell, METAFAMILY, BULK_LOAD)) {
-      return WALProtos.BulkLoadDescriptor.parseFrom(cell.getValue());
+      return WALProtos.BulkLoadDescriptor.parseFrom(CellUtil.cloneValue(cell));
    }
    return null;
  }

View File

@@ -23,6 +23,7 @@ import java.util.NavigableMap;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.wal.WAL.Entry;
@@ -44,8 +45,8 @@ public class ScopeWALEntryFilter implements WALEntryFilter {
      Cell cell = cells.get(i);
      // The scope will be null or empty if
      // there's nothing to replicate in that WALEdit
-      if (!scopes.containsKey(cell.getFamily())
-          || scopes.get(cell.getFamily()) == HConstants.REPLICATION_SCOPE_LOCAL) {
+      byte[] fam = CellUtil.cloneFamily(cell);
+      if (!scopes.containsKey(fam) || scopes.get(fam) == HConstants.REPLICATION_SCOPE_LOCAL) {
        cells.remove(i);
      }
    }

View File

@@ -25,9 +25,10 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.wal.WAL.Entry;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.wal.WAL.Entry;
 public class TableCfWALEntryFilter implements WALEntryFilter {
@@ -62,7 +63,7 @@ public class TableCfWALEntryFilter implements WALEntryFilter {
      Cell cell = cells.get(i);
      // ignore(remove) kv if its cf isn't in the replicable cf list
      // (empty cfs means all cfs of this table are replicable)
-      if ((cfs != null && !cfs.contains(Bytes.toString(cell.getFamily())))) {
+      if ((cfs != null && !cfs.contains(Bytes.toString(CellUtil.cloneFamily(cell))))) {
        cells.remove(i);
      }
    }

View File

@@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HConstants;
@@ -354,7 +355,7 @@ public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint {
      }
      sinkWriter.append(buffer.getTableName(), buffer.getEncodedRegionName(),
-        entries.get(0).getEdit().getCells().get(0).getRow(), entries);
+        CellUtil.cloneRow(entries.get(0).getEdit().getCells().get(0)), entries);
    }
    @Override

View File

@@ -244,7 +244,7 @@ public class Replication extends WALActionsListener.Base implements
        new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
    byte[] family;
    for (Cell cell : logEdit.getCells()) {
-      family = cell.getFamily();
+      family = CellUtil.cloneFamily(cell);
      // This is expected and the KV should not be replicated
      if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) continue;
      // Unexpected, has a tendency to happen in unit tests
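
The family bookkeeping here works only because the map is keyed through an explicit comparator: byte[] does not override equals/hashCode, so a cloned family array would never hit in a plain HashMap. A small sketch of the TreeMap idiom shown in this hunk (the family name and scope value are hypothetical):

import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.hbase.util.Bytes;

final class FamilyScopeLookup {
  public static void main(String[] args) {
    // Bytes.BYTES_COMPARATOR orders byte[] keys by content, not identity.
    Map<byte[], Integer> scopes = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
    scopes.put(Bytes.toBytes("cf"), 1);      // hypothetical family and scope
    byte[] cloned = Bytes.toBytes("cf");     // distinct array, same contents
    System.out.println(scopes.containsKey(cloned)); // true: compared by content
  }
}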

View File

@@ -329,9 +329,9 @@ public class AccessController extends BaseMasterAndRegionObserver
          List<KeyValue> kvList = (List<KeyValue>)family.getValue();
          for (KeyValue kv : kvList) {
            if (!authManager.authorize(user, tableName, family.getKey(),
-                kv.getQualifier(), permRequest)) {
-              return AuthResult.deny(request, "Failed qualifier check", user,
-                permRequest, tableName, makeFamilyMap(family.getKey(), kv.getQualifier()));
+                CellUtil.cloneQualifier(kv), permRequest)) {
+              return AuthResult.deny(request, "Failed qualifier check", user, permRequest,
+                tableName, makeFamilyMap(family.getKey(), CellUtil.cloneQualifier(kv)));
            }
          }
        }

View File

@@ -880,7 +880,7 @@ public class HBaseFsck extends Configured implements Closeable {
        hf = HFile.createReader(fs, hfile.getPath(), cacheConf, getConf());
        hf.loadFileInfo();
        Cell startKv = hf.getFirstKey();
-        start = startKv.getRow();
+        start = CellUtil.cloneRow(startKv);
        Cell endKv = hf.getLastKey();
        end = CellUtil.cloneRow(endKv);
      } catch (IOException ioe) {

View File

@@ -33,8 +33,6 @@ import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -43,14 +41,14 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.codehaus.jackson.map.ObjectMapper;
-import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
-// imports for things that haven't moved yet.
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 /**
 * WALPrettyPrinter prints the contents of a given WAL with a variety of
 * options affecting formatting and extent of content.
@@ -288,7 +286,7 @@ public class WALPrettyPrinter {
      for (Cell cell : edit.getCells()) {
        // add atomic operation to txn
        Map<String, Object> op = new HashMap<String, Object>(toStringMap(cell));
-        if (outputValues) op.put("value", Bytes.toStringBinary(cell.getValue()));
+        if (outputValues) op.put("value", Bytes.toStringBinary(CellUtil.cloneValue(cell)));
        // check row output filter
        if (row == null || ((String) op.get("row")).equals(row)) {
          actions.add(op);


@@ -3744,11 +3744,11 @@ public class TestFromClientSide {
 // KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO
 KeyValue kv = (KeyValue)put.getFamilyCellMap().get(CONTENTS_FAMILY).get(0);
-assertTrue(Bytes.equals(kv.getFamily(), CONTENTS_FAMILY));
+assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), CONTENTS_FAMILY));
 // will it return null or an empty byte array?
-assertTrue(Bytes.equals(kv.getQualifier(), new byte[0]));
-assertTrue(Bytes.equals(kv.getValue(), value));
+assertTrue(Bytes.equals(CellUtil.cloneQualifier(kv), new byte[0]));
+assertTrue(Bytes.equals(CellUtil.cloneValue(kv), value));
 table.put(put);
@@ -5778,8 +5778,11 @@ public class TestFromClientSide {
 int expectedIndex = 5;
 for (Result result : scanner) {
 assertEquals(result.size(), 1);
-assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[expectedIndex]));
-assertTrue(Bytes.equals(result.rawCells()[0].getQualifier(), QUALIFIERS[expectedIndex]));
+Cell c = result.rawCells()[0];
+assertTrue(Bytes.equals(c.getRowArray(), c.getRowOffset(), c.getRowLength(),
+ROWS[expectedIndex], 0, ROWS[expectedIndex].length));
+assertTrue(Bytes.equals(c.getQualifierArray(), c.getQualifierOffset(),
+c.getQualifierLength(), QUALIFIERS[expectedIndex], 0, QUALIFIERS[expectedIndex].length));
 expectedIndex--;
 }
 assertEquals(expectedIndex, 0);
@@ -5817,7 +5820,7 @@ public class TestFromClientSide {
 for (Result result : ht.getScanner(scan)) {
 assertEquals(result.size(), 1);
 assertEquals(result.rawCells()[0].getValueLength(), Bytes.SIZEOF_INT);
-assertEquals(Bytes.toInt(result.rawCells()[0].getValue()), VALUE.length);
+assertEquals(Bytes.toInt(CellUtil.cloneValue(result.rawCells()[0])), VALUE.length);
 count++;
 }
 assertEquals(count, 10);
@@ -6099,15 +6102,15 @@ public class TestFromClientSide {
 result = scanner.next();
 assertTrue("Expected 2 keys but received " + result.size(),
 result.size() == 2);
-assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[4]));
-assertTrue(Bytes.equals(result.rawCells()[1].getRow(), ROWS[4]));
-assertTrue(Bytes.equals(result.rawCells()[0].getValue(), VALUES[1]));
-assertTrue(Bytes.equals(result.rawCells()[1].getValue(), VALUES[2]));
+assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[4]));
+assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[1]), ROWS[4]));
+assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[0]), VALUES[1]));
+assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[1]), VALUES[2]));
 result = scanner.next();
 assertTrue("Expected 1 key but received " + result.size(),
 result.size() == 1);
-assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[3]));
-assertTrue(Bytes.equals(result.rawCells()[0].getValue(), VALUES[0]));
+assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[3]));
+assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[0]), VALUES[0]));
 scanner.close();
 ht.close();
 }
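
Where a test only compares bytes, the rewrite above goes further and drops the copy entirely, checking the slice of the cell's backing array in place via the long form of Bytes.equals. A minimal sketch of that zero-copy check, under the same hbase-common assumption (names illustrative):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ZeroCopyCompareSketch {
      public static void main(String[] args) {
        byte[] expectedRow = Bytes.toBytes("row1");
        Cell c = new KeyValue(expectedRow, Bytes.toBytes("f"),
            Bytes.toBytes("q"), Bytes.toBytes("v"));
        // Compare the row slice of the backing array directly; no byte[] is allocated.
        boolean same = Bytes.equals(c.getRowArray(), c.getRowOffset(), c.getRowLength(),
            expectedRow, 0, expectedRow.length);
        System.out.println(same); // true
      }
    }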


@@ -27,6 +27,7 @@ import java.util.ConcurrentModificationException;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -106,8 +107,8 @@ public class TestPutDeleteEtcCellIteration {
 Cell cell = cellScanner.current();
 byte [] bytes = Bytes.toBytes(index++);
 KeyValue kv = (KeyValue)cell;
-assertTrue(Bytes.equals(kv.getFamily(), bytes));
-assertTrue(Bytes.equals(kv.getValue(), bytes));
+assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), bytes));
+assertTrue(Bytes.equals(CellUtil.cloneValue(kv), bytes));
 }
 assertEquals(COUNT, index);
 }
@@ -125,8 +126,8 @@ public class TestPutDeleteEtcCellIteration {
 int value = index;
 byte [] bytes = Bytes.toBytes(index++);
 KeyValue kv = (KeyValue)cell;
-assertTrue(Bytes.equals(kv.getFamily(), bytes));
-long a = Bytes.toLong(kv.getValue());
+assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), bytes));
+long a = Bytes.toLong(CellUtil.cloneValue(kv));
 assertEquals(value, a);
 }
 assertEquals(COUNT, index);


@@ -20,19 +20,19 @@
 package org.apache.hadoop.hbase.coprocessor;
 import java.io.IOException;
-import java.util.List;
 import java.util.Arrays;
+import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
-import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.wal.WALKey;
 /**
 * Class for testing WALObserver coprocessor.
@@ -119,8 +119,8 @@ implements WALObserver {
 Cell deletedCell = null;
 for (Cell cell : cells) {
 // assume only one kv from the WALEdit matches.
-byte[] family = cell.getFamily();
-byte[] qulifier = cell.getQualifier();
+byte[] family = CellUtil.cloneFamily(cell);
+byte[] qulifier = CellUtil.cloneQualifier(cell);
 if (Arrays.equals(family, ignoredFamily) &&
 Arrays.equals(qulifier, ignoredQualifier)) {


@@ -66,10 +66,10 @@ import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFile.Reader;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
-import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.wal.WALKey;
 import com.google.common.collect.ImmutableList;
@@ -384,20 +384,23 @@ public class SimpleRegionObserver extends BaseRegionObserver {
 assertNotNull(cells);
 assertNotNull(cells.get(0));
 KeyValue kv = (KeyValue)cells.get(0);
-assertTrue(Bytes.equals(kv.getQualifier(),
-TestRegionObserverInterface.A));
+assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+kv.getQualifierLength(), TestRegionObserverInterface.A, 0,
+TestRegionObserverInterface.A.length));
 cells = familyMap.get(TestRegionObserverInterface.B);
 assertNotNull(cells);
 assertNotNull(cells.get(0));
 kv = (KeyValue)cells.get(0);
-assertTrue(Bytes.equals(kv.getQualifier(),
-TestRegionObserverInterface.B));
+assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+kv.getQualifierLength(), TestRegionObserverInterface.B, 0,
+TestRegionObserverInterface.B.length));
 cells = familyMap.get(TestRegionObserverInterface.C);
 assertNotNull(cells);
 assertNotNull(cells.get(0));
 kv = (KeyValue)cells.get(0);
-assertTrue(Bytes.equals(kv.getQualifier(),
-TestRegionObserverInterface.C));
+assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+kv.getQualifierLength(), TestRegionObserverInterface.C, 0,
+TestRegionObserverInterface.C.length));
 }
 ctPrePut.incrementAndGet();
 }
@@ -418,19 +421,25 @@ public class SimpleRegionObserver extends BaseRegionObserver {
 assertNotNull(cells.get(0));
 // KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO
 KeyValue kv = (KeyValue)cells.get(0);
-assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.A));
+assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+kv.getQualifierLength(), TestRegionObserverInterface.A, 0,
+TestRegionObserverInterface.A.length));
 cells = familyMap.get(TestRegionObserverInterface.B);
 assertNotNull(cells);
 assertNotNull(cells.get(0));
 // KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO
 kv = (KeyValue)cells.get(0);
-assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.B));
+assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+kv.getQualifierLength(), TestRegionObserverInterface.B, 0,
+TestRegionObserverInterface.B.length));
 cells = familyMap.get(TestRegionObserverInterface.C);
 assertNotNull(cells);
 assertNotNull(cells.get(0));
 // KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO
 kv = (KeyValue)cells.get(0);
-assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.C));
+assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
+kv.getQualifierLength(), TestRegionObserverInterface.C, 0,
+TestRegionObserverInterface.C.length));
 }
 ctPostPut.incrementAndGet();
 }


@@ -37,6 +37,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -51,11 +52,6 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.wal.DefaultWALProvider;
-import org.apache.hadoop.hbase.wal.WAL;
-import org.apache.hadoop.hbase.wal.WALFactory;
-import org.apache.hadoop.hbase.wal.WALKey;
-import org.apache.hadoop.hbase.wal.WALSplitter;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -63,14 +59,19 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdge;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.wal.DefaultWALProvider;
+import org.apache.hadoop.hbase.wal.WAL;
+import org.apache.hadoop.hbase.wal.WALFactory;
+import org.apache.hadoop.hbase.wal.WALKey;
+import org.apache.hadoop.hbase.wal.WALSplitter;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.TestName;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
 /**
 * Tests invocation of the
@@ -216,14 +217,14 @@ public class TestWALObserver {
 List<Cell> cells = edit.getCells();
 for (Cell cell : cells) {
-if (Arrays.equals(cell.getFamily(), TEST_FAMILY[0])) {
+if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[0])) {
 foundFamily0 = true;
 }
-if (Arrays.equals(cell.getFamily(), TEST_FAMILY[2])) {
+if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[2])) {
 foundFamily2 = true;
 }
-if (Arrays.equals(cell.getFamily(), TEST_FAMILY[1])) {
-if (!Arrays.equals(cell.getValue(), TEST_VALUE[1])) {
+if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[1])) {
+if (!Arrays.equals(CellUtil.cloneValue(cell), TEST_VALUE[1])) {
 modifiedFamily1 = true;
 }
 }
@@ -244,14 +245,14 @@ public class TestWALObserver {
 foundFamily2 = false;
 modifiedFamily1 = false;
 for (Cell cell : cells) {
-if (Arrays.equals(cell.getFamily(), TEST_FAMILY[0])) {
+if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[0])) {
 foundFamily0 = true;
 }
-if (Arrays.equals(cell.getFamily(), TEST_FAMILY[2])) {
+if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[2])) {
 foundFamily2 = true;
 }
-if (Arrays.equals(cell.getFamily(), TEST_FAMILY[1])) {
-if (!Arrays.equals(cell.getValue(), TEST_VALUE[1])) {
+if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[1])) {
+if (!Arrays.equals(CellUtil.cloneValue(cell), TEST_VALUE[1])) {
 modifiedFamily1 = true;
 }
 }


@@ -534,7 +534,7 @@ public class TestFilter {
 ArrayList<Cell> values = new ArrayList<Cell>();
 boolean isMoreResults = scanner.next(values);
 if (!isMoreResults
-|| !Bytes.toString(values.get(0).getRow()).startsWith(prefix)) {
+|| !Bytes.toString(CellUtil.cloneRow(values.get(0))).startsWith(prefix)) {
 Assert.assertTrue(
 "The WhileMatchFilter should now filter all remaining",
 filter.filterAllRemaining());
@@ -1558,7 +1558,7 @@ public class TestFilter {
 };
 for(KeyValue kv : srcKVs) {
-Put put = new Put(kv.getRow()).add(kv);
+Put put = new Put(CellUtil.cloneRow(kv)).add(kv);
 put.setDurability(Durability.SKIP_WAL);
 this.region.put(put);
 }
@@ -1597,7 +1597,7 @@ public class TestFilter {
 // Add QUALIFIERS_ONE[1] to ROWS_THREE[0] with VALUES[0]
 KeyValue kvA = new KeyValue(ROWS_THREE[0], FAMILIES[0], QUALIFIERS_ONE[1], VALUES[0]);
-this.region.put(new Put(kvA.getRow()).add(kvA));
+this.region.put(new Put(CellUtil.cloneRow(kvA)).add(kvA));
 // Match VALUES[1] against QUALIFIERS_ONE[1] with filterIfMissing = true
 // Expect 1 row (3)


@@ -541,12 +541,13 @@ public class TestFilterList {
 // Value for fam:qual1 should be stripped:
 assertEquals(Filter.ReturnCode.INCLUDE, flist.filterKeyValue(kvQual1));
 final KeyValue transformedQual1 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual1));
-assertEquals(0, transformedQual1.getValue().length);
+assertEquals(0, transformedQual1.getValueLength());
 // Value for fam:qual2 should not be stripped:
 assertEquals(Filter.ReturnCode.INCLUDE, flist.filterKeyValue(kvQual2));
 final KeyValue transformedQual2 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual2));
-assertEquals("value", Bytes.toString(transformedQual2.getValue()));
+assertEquals("value", Bytes.toString(transformedQual2.getValueArray(),
+transformedQual2.getValueOffset(), transformedQual2.getValueLength()));
 // Other keys should be skipped:
 assertEquals(Filter.ReturnCode.SKIP, flist.filterKeyValue(kvQual3));


@@ -32,6 +32,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
@@ -102,7 +103,7 @@ public class TestHalfStoreFileReader {
 HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf);
 r.loadFileInfo();
 Cell midKV = r.midkey();
-byte[] midkey = ((KeyValue.KeyOnlyKeyValue)midKV).getRow();
+byte[] midkey = CellUtil.cloneRow(midKV);
 //System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));
@@ -167,7 +168,7 @@ public class TestHalfStoreFileReader {
 HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf);
 r.loadFileInfo();
 Cell midKV = r.midkey();
-byte[] midkey = ((KeyValue.KeyOnlyKeyValue)midKV).getRow();
+byte[] midkey = CellUtil.cloneRow(midKV);
 Reference bottom = new Reference(midkey, Reference.Range.bottom);
 Reference top = new Reference(midkey, Reference.Range.top);


@@ -37,6 +37,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.Tag;
@@ -132,7 +133,8 @@ public class TestPrefixTreeEncoding {
 new KeyValue.KeyOnlyKeyValue(seekKey.getBuffer(), seekKey.getKeyOffset(), seekKey
 .getKeyLength()), true);
 assertNotNull(seeker.getKeyValue());
-assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1), seeker.getKeyValue().getRow());
+assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1),
+CellUtil.cloneRow(seeker.getKeyValue()));
 // Seek before the last keyvalue;
 seekKey = KeyValueUtil.createFirstDeleteFamilyOnRow(Bytes.toBytes("zzzz"), CF_BYTES);
@@ -140,7 +142,8 @@ public class TestPrefixTreeEncoding {
 new KeyValue.KeyOnlyKeyValue(seekKey.getBuffer(), seekKey.getKeyOffset(), seekKey
 .getKeyLength()), true);
 assertNotNull(seeker.getKeyValue());
-assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1), seeker.getKeyValue().getRow());
+assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1),
+CellUtil.cloneRow(seeker.getKeyValue()));
 }
 @Test


@@ -18,9 +18,6 @@
 */
 package org.apache.hadoop.hbase.io.hfile;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
@@ -512,8 +509,8 @@ public class TestHFile extends HBaseTestCase {
 newKey = HFileWriterImpl.getMidpoint(keyComparator, kv1, kv2);
 assertTrue(keyComparator.compare(kv1, newKey) < 0);
 assertTrue((keyComparator.compare(kv2, newKey)) > 0);
-assertTrue(Arrays.equals(newKey.getFamily(), family));
-assertTrue(Arrays.equals(newKey.getQualifier(), qualB));
+assertTrue(Arrays.equals(CellUtil.cloneFamily(newKey), family));
+assertTrue(Arrays.equals(CellUtil.cloneQualifier(newKey), qualB));
 assertTrue(newKey.getTimestamp() == HConstants.LATEST_TIMESTAMP);
 assertTrue(newKey.getTypeByte() == Type.Maximum.getCode());


@@ -48,6 +48,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
@@ -131,7 +132,7 @@ public class TestHFileBlock {
 // generate it or repeat, it should compress well
 if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
-row = keyValues.get(randomizer.nextInt(keyValues.size())).getRow();
+row = CellUtil.cloneRow(keyValues.get(randomizer.nextInt(keyValues.size())));
 } else {
 row = new byte[FIELD_LENGTH];
 randomizer.nextBytes(row);
@@ -140,17 +141,16 @@ public class TestHFileBlock {
 family = new byte[FIELD_LENGTH];
 randomizer.nextBytes(family);
 } else {
-family = keyValues.get(0).getFamily();
+family = CellUtil.cloneFamily(keyValues.get(0));
 }
 if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
-qualifier = keyValues.get(
-randomizer.nextInt(keyValues.size())).getQualifier();
+qualifier = CellUtil.cloneQualifier(keyValues.get(randomizer.nextInt(keyValues.size())));
 } else {
 qualifier = new byte[FIELD_LENGTH];
 randomizer.nextBytes(qualifier);
 }
 if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
-value = keyValues.get(randomizer.nextInt(keyValues.size())).getValue();
+value = CellUtil.cloneValue(keyValues.get(randomizer.nextInt(keyValues.size())));
 } else {
 value = new byte[FIELD_LENGTH];
 randomizer.nextBytes(value);


@@ -480,7 +480,7 @@ public class TestHFileBlockCompatibility {
 this.dataBlockEncoder.encode(kv, dataBlockEncodingCtx, this.userDataStream);
 this.unencodedDataSizeWritten += kv.getLength();
 if (dataBlockEncodingCtx.getHFileContext().isIncludesMvcc()) {
-this.unencodedDataSizeWritten += WritableUtils.getVIntSize(kv.getMvccVersion());
+this.unencodedDataSizeWritten += WritableUtils.getVIntSize(kv.getSequenceId());
 }
 }
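
getMvccVersion() was the deprecated alias for getSequenceId() and returned the same underlying value, so accounting code like the line above changes only in name, not in output. A small sketch, assuming hbase-common and hadoop-common on the classpath (the class name is illustrative):

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.io.WritableUtils;

    public class SequenceIdSketch {
      public static void main(String[] args) {
        KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"),
            Bytes.toBytes("q"), Bytes.toBytes("v"));
        // Formerly WritableUtils.getVIntSize(kv.getMvccVersion()); same value either way.
        System.out.println(WritableUtils.getVIntSize(kv.getSequenceId()));
      }
    }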


@@ -215,8 +215,10 @@ public class TestHFileWriterV2 {
 }
 // A brute-force check to see that all keys and values are correct.
-assertTrue(Bytes.compareTo(key, keyValues.get(entriesRead).getKey()) == 0);
-assertTrue(Bytes.compareTo(value, keyValues.get(entriesRead).getValue()) == 0);
+KeyValue kv = keyValues.get(entriesRead);
+assertTrue(Bytes.compareTo(key, kv.getKey()) == 0);
+assertTrue(Bytes.compareTo(value, 0, value.length, kv.getValueArray(), kv.getValueOffset(),
+kv.getValueLength()) == 0);
 ++entriesRead;
 }


@@ -44,7 +44,6 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
@@ -251,11 +250,13 @@ public class TestHFileWriterV3 {
 }
 // A brute-force check to see that all keys and values are correct.
-assertTrue(Bytes.compareTo(key, keyValues.get(entriesRead).getKey()) == 0);
-assertTrue(Bytes.compareTo(value, keyValues.get(entriesRead).getValue()) == 0);
+KeyValue kv = keyValues.get(entriesRead);
+assertTrue(Bytes.compareTo(key, kv.getKey()) == 0);
+assertTrue(Bytes.compareTo(value, 0, value.length, kv.getValueArray(), kv.getValueOffset(),
+kv.getValueLength()) == 0);
 if (useTags) {
 assertNotNull(tagValue);
-KeyValue tkv = keyValues.get(entriesRead);
+KeyValue tkv = kv;
 assertEquals(tagValue.length, tkv.getTagsLength());
 assertTrue(Bytes.compareTo(tagValue, 0, tagValue.length, tkv.getTagsArray(),
 tkv.getTagsOffset(), tkv.getTagsLength()) == 0);


@@ -107,7 +107,8 @@ public class TestSeekTo {
 }
 }
 static String toRowStr(Cell kv) {
-return Bytes.toString(KeyValueUtil.ensureKeyValue(kv).getRow());
+KeyValue c = KeyValueUtil.ensureKeyValue(kv);
+return Bytes.toString(c.getRowArray(), c.getRowOffset(), c.getRowLength());
 }
 Path makeNewFile(TagUsage tagUsage) throws IOException {


@@ -34,8 +34,6 @@ import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.Callable;
-import junit.framework.Assert;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -91,6 +89,8 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
+import junit.framework.Assert;
 /**
 * Simple test for {@link KeyValueSortReducer} and {@link HFileOutputFormat}.
 * Sets up and runs a mapreduce job that writes hfile output.
@@ -201,8 +201,11 @@ public class TestHFileOutputFormat {
 KeyValue original = kv.clone();
 writer.write(new ImmutableBytesWritable(), kv);
 assertFalse(original.equals(kv));
-assertTrue(Bytes.equals(original.getRow(), kv.getRow()));
-assertTrue(CellUtil.matchingColumn(original, kv.getFamily(), kv.getQualifier()));
+assertTrue(Bytes.equals(original.getRowArray(), original.getRowOffset(),
+original.getRowLength(), kv.getRowArray(), kv.getRowOffset(), kv.getRowLength()));
+assertTrue(CellUtil.matchingColumn(original, kv.getFamilyArray(), kv.getFamilyOffset(),
+kv.getFamilyLength(), kv.getQualifierArray(), kv.getQualifierOffset(),
+kv.getQualifierLength()));
 assertNotSame(original.getTimestamp(), kv.getTimestamp());
 assertNotSame(HConstants.LATEST_TIMESTAMP, kv.getTimestamp());


@@ -586,7 +586,7 @@ public class TestImportExport {
 ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
 KeyValue key = (KeyValue) invocation.getArguments()[1];
 assertEquals("Key", Bytes.toString(writer.get()));
-assertEquals("row", Bytes.toString(key.getRow()));
+assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
 return null;
 }
 }).when(ctx).write(any(ImmutableBytesWritable.class), any(KeyValue.class));


@@ -33,11 +33,11 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
@@ -45,13 +45,13 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.WALPlayer.WALKeyValueMapper;
-import org.apache.hadoop.hbase.wal.WAL;
-import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MapReduceTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LauncherSecurityManager;
+import org.apache.hadoop.hbase.wal.WAL;
+import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Mapper.Context;
 import org.apache.hadoop.util.ToolRunner;
@@ -151,15 +151,13 @@ public class TestWALPlayer {
 WALKey key = mock(WALKey.class);
 when(key.getTablename()).thenReturn(TableName.valueOf("table"));
 @SuppressWarnings("unchecked")
-Mapper<WALKey, WALEdit, ImmutableBytesWritable, KeyValue>.Context context =
-mock(Context.class);
+Mapper<WALKey, WALEdit, ImmutableBytesWritable, KeyValue>.Context context = mock(Context.class);
 when(context.getConfiguration()).thenReturn(configuration);
 WALEdit value = mock(WALEdit.class);
 ArrayList<Cell> values = new ArrayList<Cell>();
-KeyValue kv1 = mock(KeyValue.class);
-when(kv1.getFamily()).thenReturn(Bytes.toBytes("family"));
-when(kv1.getRow()).thenReturn(Bytes.toBytes("row"));
+KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), null);
 values.add(kv1);
 when(value.getCells()).thenReturn(values);
 mapper.setup(context);
@@ -171,7 +169,7 @@ public class TestWALPlayer {
 ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
 KeyValue key = (KeyValue) invocation.getArguments()[1];
 assertEquals("row", Bytes.toString(writer.get()));
-assertEquals("row", Bytes.toString(key.getRow()));
+assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
 return null;
 }
 }).when(context).write(any(ImmutableBytesWritable.class), any(KeyValue.class));
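
Since the removed getters can no longer be stubbed, the hunk above trades the Mockito mock for a real KeyValue and lets the assertions read the actual byte layout. A minimal sketch of that substitution, assuming hbase-common only (the class name is illustrative):

    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RealKeyValueSketch {
      public static void main(String[] args) {
        // Replaces mock(KeyValue.class) plus when(kv1.getRow())/when(kv1.getFamily()) stubs.
        KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), null);
        System.out.println(Bytes.toString(CellUtil.cloneRow(kv1)));    // row
        System.out.println(Bytes.toString(CellUtil.cloneFamily(kv1))); // family
      }
    }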


@@ -257,9 +257,14 @@ public class TestWALRecordReader {
 for (byte[] column : columns) {
 assertTrue(reader.nextKeyValue());
 Cell cell = reader.getCurrentValue().getCells().get(0);
-if (!Bytes.equals(column, cell.getQualifier())) {
-assertTrue("expected [" + Bytes.toString(column) + "], actual ["
-+ Bytes.toString(cell.getQualifier()) + "]", false);
+if (!Bytes.equals(column, 0, column.length, cell.getQualifierArray(),
+cell.getQualifierOffset(), cell.getQualifierLength())) {
+assertTrue(
+"expected ["
++ Bytes.toString(column)
++ "], actual ["
++ Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(),
+cell.getQualifierLength()) + "]", false);
 }
 }
 assertFalse(reader.nextKeyValue());


@@ -285,7 +285,7 @@ public class TestBulkLoad {
 @Override
 protected boolean matchesSafely(WALEdit item) {
-assertTrue(Arrays.equals(item.getCells().get(0).getQualifier(), typeBytes));
+assertTrue(Arrays.equals(CellUtil.cloneQualifier(item.getCells().get(0)), typeBytes));
 BulkLoadDescriptor desc;
 try {
 desc = WALEdit.getBulkLoadDescriptor(item.getCells().get(0));


@@ -25,6 +25,7 @@ import junit.framework.TestCase;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -49,15 +50,18 @@ public class TestCellSkipListSet extends TestCase {
 assertEquals(1, this.csls.size());
 Cell first = this.csls.first();
 assertTrue(kv.equals(first));
-assertTrue(Bytes.equals(kv.getValue(), first.getValue()));
+assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(),
+first.getValueArray(), first.getValueOffset(), first.getValueLength()));
 // Now try overwritting
 byte [] overwriteValue = Bytes.toBytes("overwrite");
 KeyValue overwrite = new KeyValue(bytes, bytes, bytes, overwriteValue);
 this.csls.add(overwrite);
 assertEquals(1, this.csls.size());
 first = this.csls.first();
-assertTrue(Bytes.equals(overwrite.getValue(), first.getValue()));
-assertFalse(Bytes.equals(overwrite.getValue(), kv.getValue()));
+assertTrue(Bytes.equals(overwrite.getValueArray(), overwrite.getValueOffset(),
+overwrite.getValueLength(), first.getValueArray(), first.getValueOffset(),
+first.getValueLength()));
+assertFalse(Bytes.equals(CellUtil.cloneValue(overwrite), CellUtil.cloneValue(kv)));
 }
 public void testIterator() throws Exception {
@@ -71,8 +75,10 @@ public class TestCellSkipListSet extends TestCase {
 // Assert that we added 'total' values and that they are in order
 int count = 0;
 for (Cell kv: this.csls) {
-assertEquals("" + count, Bytes.toString(kv.getQualifier()));
-assertTrue(Bytes.equals(kv.getValue(), value1));
+assertEquals("" + count,
+Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
+assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value1,
+0, value1.length));
 count++;
 }
 assertEquals(total, count);
@@ -84,8 +90,10 @@ public class TestCellSkipListSet extends TestCase {
 // we are getting back value2
 count = 0;
 for (Cell kv : this.csls) {
-assertEquals("" + count, Bytes.toString(kv.getQualifier()));
-assertTrue(Bytes.equals(kv.getValue(), value2));
+assertEquals("" + count,
+Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
+assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value2,
+0, value2.length));
 count++;
 }
 assertEquals(total, count);
@@ -103,8 +111,10 @@ public class TestCellSkipListSet extends TestCase {
 int count = 0;
 for (Iterator<Cell> i = this.csls.descendingIterator(); i.hasNext();) {
 Cell kv = i.next();
-assertEquals("" + (total - (count + 1)), Bytes.toString(kv.getQualifier()));
-assertTrue(Bytes.equals(kv.getValue(), value1));
+assertEquals("" + (total - (count + 1)),
+Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
+assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value1,
+0, value1.length));
 count++;
 }
 assertEquals(total, count);
@@ -117,8 +127,10 @@ public class TestCellSkipListSet extends TestCase {
 count = 0;
 for (Iterator<Cell> i = this.csls.descendingIterator(); i.hasNext();) {
 Cell kv = i.next();
-assertEquals("" + (total - (count + 1)), Bytes.toString(kv.getQualifier()));
-assertTrue(Bytes.equals(kv.getValue(), value2));
+assertEquals("" + (total - (count + 1)),
+Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
+assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value2,
+0, value2.length));
 count++;
 }
 assertEquals(total, count);
@@ -145,8 +157,10 @@ public class TestCellSkipListSet extends TestCase {
 this.csls.add(new KeyValue(bytes, bytes, Bytes.toBytes("" + i), value2));
 }
 tail = this.csls.tailSet(splitter);
-assertTrue(Bytes.equals(tail.first().getValue(), value2));
+assertTrue(Bytes.equals(tail.first().getValueArray(), tail.first().getValueOffset(),
+tail.first().getValueLength(), value2, 0, value2.length));
 head = this.csls.headSet(splitter);
-assertTrue(Bytes.equals(head.first().getValue(), value2));
+assertTrue(Bytes.equals(head.first().getValueArray(), head.first().getValueOffset(),
+head.first().getValueLength(), value2, 0, value2.length));
 }
 }


@@ -207,8 +207,8 @@ public class TestCompoundBloomFilter {
 // Test for false negatives (not allowed).
 int numChecked = 0;
 for (KeyValue kv : kvs) {
-byte[] row = kv.getRow();
-boolean present = isInBloom(scanner, row, kv.getQualifier());
+byte[] row = CellUtil.cloneRow(kv);
+boolean present = isInBloom(scanner, row, CellUtil.cloneQualifier(kv));
 assertTrue(testIdMsg + " Bloom filter false negative on row "
 + Bytes.toStringBinary(row) + " after " + numChecked
 + " successful checks", present);
@@ -358,9 +358,10 @@ public class TestCompoundBloomFilter {
 KeyValue rowKV = KeyValueUtil.createKeyValueFromKey(rowKey);
 KeyValue rowColKV = KeyValueUtil.createKeyValueFromKey(rowColKey);
 assertEquals(rowKV.getTimestamp(), rowColKV.getTimestamp());
-assertEquals(Bytes.toStringBinary(rowKV.getRow()),
-Bytes.toStringBinary(rowColKV.getRow()));
-assertEquals(0, rowKV.getQualifier().length);
+assertEquals(Bytes.toStringBinary(rowKV.getRowArray(), rowKV.getRowOffset(),
+rowKV.getRowLength()), Bytes.toStringBinary(rowColKV.getRowArray(), rowColKV.getRowOffset(),
+rowColKV.getRowLength()));
+assertEquals(0, rowKV.getQualifierLength());
 }


@@ -88,7 +88,9 @@ public class TestDefaultMemStore extends TestCase {
 this.memstore.add(samekey);
 Cell found = this.memstore.cellSet.first();
 assertEquals(1, this.memstore.cellSet.size());
-assertTrue(Bytes.toString(found.getValue()), CellUtil.matchingValue(samekey, found));
+assertTrue(
+Bytes.toString(found.getValueArray(), found.getValueOffset(), found.getValueLength()),
+CellUtil.matchingValue(samekey, found));
 }
 /**


@@ -3973,8 +3973,8 @@ public class TestHRegion {
 if (previousKV != null) {
 if (Bytes.compareTo(CellUtil.cloneValue(previousKV), thisValue) != 0) {
 LOG.warn("These two KV should have the same value." + " Previous KV:" + previousKV
-+ "(memStoreTS:" + previousKV.getMvccVersion() + ")" + ", New KV: " + kv
-+ "(memStoreTS:" + kv.getMvccVersion() + ")");
++ "(memStoreTS:" + previousKV.getSequenceId() + ")" + ", New KV: " + kv
++ "(memStoreTS:" + kv.getSequenceId() + ")");
 assertEquals(0, Bytes.compareTo(CellUtil.cloneValue(previousKV), thisValue));
 }
 }
@@ -5132,17 +5132,20 @@ public class TestHRegion {
 List<Cell> currRow = new ArrayList<Cell>();
 boolean hasNext = scanner.next(currRow);
 assertEquals(2, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowC, 0, rowC.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowB, 0, rowB.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowA));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowA, 0, rowA.length));
 assertFalse(hasNext);
 scanner.close();
 } finally {
@@ -5189,17 +5192,20 @@ public class TestHRegion {
 InternalScanner scanner = region.getScanner(scan);
 boolean hasNext = scanner.next(currRow);
 assertEquals(2, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowC, 0, rowC.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowB, 0, rowB.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowA));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowA, 0, rowA.length));
 assertFalse(hasNext);
 scanner.close();
 } finally {
@@ -5243,17 +5249,20 @@ public class TestHRegion {
 InternalScanner scanner = region.getScanner(scan);
 boolean hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowC, 0, rowC.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowB, 0, rowB.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowA));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowA, 0, rowA.length));
 assertFalse(hasNext);
 scanner.close();
 } finally {
@@ -5311,17 +5320,20 @@ public class TestHRegion {
 InternalScanner scanner = region.getScanner(scan);
 boolean hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowD, 0, rowD.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowC, 0, rowC.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowB, 0, rowB.length));
 assertFalse(hasNext);
 scanner.close();
@@ -5332,7 +5344,8 @@ public class TestHRegion {
 scanner = region.getScanner(scan);
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowD, 0, rowD.length));
 scanner.close();
 } finally {
 HBaseTestingUtility.closeRegionAndWAL(this.region);
@@ -5391,17 +5404,20 @@ public class TestHRegion {
 InternalScanner scanner = region.getScanner(scan);
 boolean hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowD, 0, rowD.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowC, 0, rowC.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowB, 0, rowB.length));
 assertFalse(hasNext);
 scanner.close();
@@ -5412,7 +5428,8 @@ public class TestHRegion {
 scanner = region.getScanner(scan);
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), rowD, 0, rowD.length));
 scanner.close();
 } finally {
 HBaseTestingUtility.closeRegionAndWAL(this.region);
@@ -5536,42 +5553,49 @@ public class TestHRegion {
 // "row4" takes 2 next() calls since batch=3
 hasNext = scanner.next(currRow);
 assertEquals(3, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), row4));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), row4, 0, row4.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(2, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), row4));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow.get(0).getRowLength(), row4, 0,
+row4.length));
 assertTrue(hasNext);
 // 2. scan out "row3" (2 kv)
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(2, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), row3));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), row3, 0, row3.length));
 assertTrue(hasNext);
 // 3. scan out "row2" (4 kvs)
 // "row2" takes 2 next() calls since batch=3
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(3, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), row2));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), row2, 0, row2.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), row2));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), row2, 0, row2.length));
 assertTrue(hasNext);
 // 4. scan out "row1" (2 kv)
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(2, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), row1));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), row1, 0, row1.length));
 assertTrue(hasNext);
 // 5. scan out "row0" (1 kv)
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), row0));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), row0, 0, row0.length));
 assertFalse(hasNext);
 scanner.close();
@@ -5632,22 +5656,26 @@ public class TestHRegion {
 List<Cell> currRow = new ArrayList<Cell>();
 boolean hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), row4));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), row4, 0, row4.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), row3));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
+.get(0).getRowLength(), row3, 0, row3.length));
 assertTrue(hasNext);
 currRow.clear();
 hasNext = scanner.next(currRow);
 assertEquals(1, currRow.size());
-assertTrue(Bytes.equals(currRow.get(0).getRow(), row2));
+assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row2, 0, row2.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), row1)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row1, 0, row1.length));
assertFalse(hasNext); assertFalse(hasNext);
} finally { } finally {
HBaseTestingUtility.closeRegionAndWAL(this.region); HBaseTestingUtility.closeRegionAndWAL(this.region);
@ -5699,7 +5727,8 @@ public class TestHRegion {
int verify = startRow + 2 * numRows - 1; int verify = startRow + 2 * numRows - 1;
do { do {
more = scanner.next(currRow); more = scanner.next(currRow);
assertEquals(Bytes.toString(currRow.get(0).getRow()), verify + ""); assertEquals(Bytes.toString(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(),
currRow.get(0).getRowLength()), verify + "");
verify--; verify--;
currRow.clear(); currRow.clear();
} while(more); } while(more);
@ -5712,7 +5741,8 @@ public class TestHRegion {
verify = startRow + 2 * numRows - 1; verify = startRow + 2 * numRows - 1;
do { do {
more = scanner.next(currRow); more = scanner.next(currRow);
assertEquals(Bytes.toString(currRow.get(0).getRow()), verify + ""); assertEquals(Bytes.toString(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(),
currRow.get(0).getRowLength()), verify + "");
verify--; verify--;
currRow.clear(); currRow.clear();
} while(more); } while(more);
@ -5725,7 +5755,8 @@ public class TestHRegion {
verify = startRow + numRows - 1; verify = startRow + numRows - 1;
do { do {
more = scanner.next(currRow); more = scanner.next(currRow);
assertEquals(Bytes.toString(currRow.get(0).getRow()), verify + ""); assertEquals(Bytes.toString(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(),
currRow.get(0).getRowLength()), verify + "");
verify--; verify--;
currRow.clear(); currRow.clear();
} while(more); } while(more);
@ -5738,7 +5769,8 @@ public class TestHRegion {
verify = startRow + numRows - 1; verify = startRow + numRows - 1;
do { do {
more = scanner.next(currRow); more = scanner.next(currRow);
assertEquals(Bytes.toString(currRow.get(0).getRow()), verify + ""); assertEquals(Bytes.toString(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(),
currRow.get(0).getRowLength()), verify + "");
verify--; verify--;
currRow.clear(); currRow.clear();
} while(more); } while(more);
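Note on the pattern above: every hunk in this file swaps the deprecated, copying Cell#getRow() for an offset-based comparison against the cell's backing array, avoiding one byte[] allocation per assertion. A minimal sketch of the rewrite, using a hypothetical helper name (rowEquals is not part of this commit; Cell and Bytes are the org.apache.hadoop.hbase types used throughout):

    // Before: Bytes.equals(cell.getRow(), expected) -- getRow() copied the row into a fresh byte[].
    // After: compare in place against the cell's backing array, no allocation.
    static boolean rowEquals(Cell cell, byte[] expected) {
      return Bytes.equals(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
          expected, 0, expected.length);
    }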

View File

@@ -279,7 +279,7 @@ public class TestHRegionReplayEvents {
 if (WALEdit.isMetaEditFamily(entry.getEdit().getCells().get(0))) {
 return 0; // handled elsewhere
 }
-Put put = new Put(entry.getEdit().getCells().get(0).getRow());
+Put put = new Put(CellUtil.cloneRow(entry.getEdit().getCells().get(0)));
 for (Cell cell : entry.getEdit().getCells()) put.add(cell);
 put.setDurability(Durability.SKIP_WAL);
 MutationReplay mutation = new MutationReplay(MutationType.PUT, put, 0, 0);

View File

@@ -355,7 +355,7 @@ public class TestMajorCompaction {
 HFileScanner scanner = f.getReader().getScanner(false, false);
 scanner.seekTo();
 do {
-byte [] row = scanner.getCell().getRow();
+byte [] row = CellUtil.cloneRow(scanner.getCell());
 if (Bytes.equals(row, STARTROW)) {
 count1++;
 } else if(Bytes.equals(row, secondRowBytes)) {
@@ -457,7 +457,7 @@ public class TestMajorCompaction {
 List<Cell> results = new ArrayList<Cell>();
 boolean result = s.next(results);
 assertTrue(!results.isEmpty());
-r.delete(new Delete(results.get(0).getRow()));
+r.delete(new Delete(CellUtil.cloneRow(results.get(0))));
 if (!result) break;
 } while (true);
 s.close();
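Where the caller genuinely needs its own copy of the row, for example to seed a Delete, the replacement is an explicit CellUtil.cloneRow(cell) rather than the removed Cell#getRow(). A short sketch of that usage, assuming only the CellUtil API already shown in these hunks (r and results as in the test above):

    // CellUtil.cloneRow copies the row bytes out of the cell's backing array;
    // same result as the deprecated cell.getRow(), but the copy is now explicit.
    Delete d = new Delete(CellUtil.cloneRow(results.get(0)));
    r.delete(d);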

View File

@@ -21,38 +21,38 @@ import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
-import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import junit.framework.Assert;
 @Category({RegionServerTests.class, MediumTests.class})
 public class TestScannerWithBulkload {
 private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -100,11 +100,16 @@ public class TestScannerWithBulkload {
 while (result != null) {
 List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q"));
 for (Cell _c : cells) {
-if (Bytes.toString(_c.getRow()).equals("row1")) {
-System.out.println(Bytes.toString(_c.getRow()));
-System.out.println(Bytes.toString(_c.getQualifier()));
-System.out.println(Bytes.toString(_c.getValue()));
-Assert.assertEquals("version3", Bytes.toString(_c.getValue()));
+if (Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength())
+    .equals("row1")) {
+System.out
+    .println(Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength()));
+System.out.println(Bytes.toString(_c.getQualifierArray(), _c.getQualifierOffset(),
+    _c.getQualifierLength()));
+System.out.println(
+    Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
+Assert.assertEquals("version3",
+    Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
 }
 }
 result = scanner.next();
@@ -118,11 +123,16 @@ public class TestScannerWithBulkload {
 while (result != null) {
 List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q"));
 for (Cell _c : cells) {
-if (Bytes.toString(_c.getRow()).equals("row1")) {
-System.out.println(Bytes.toString(_c.getRow()));
-System.out.println(Bytes.toString(_c.getQualifier()));
-System.out.println(Bytes.toString(_c.getValue()));
-Assert.assertEquals(expctedVal, Bytes.toString(_c.getValue()));
+if (Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength())
+    .equals("row1")) {
+System.out
+    .println(Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength()));
+System.out.println(Bytes.toString(_c.getQualifierArray(), _c.getQualifierOffset(),
+    _c.getQualifierLength()));
+System.out.println(
+    Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
+Assert.assertEquals(expctedVal,
+    Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
 }
 }
 result = scanner.next();
@@ -191,7 +201,9 @@ public class TestScannerWithBulkload {
 Result result = scanner.next();
 List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q"));
 Assert.assertEquals(1, cells.size());
-Assert.assertEquals("version1", Bytes.toString(cells.get(0).getValue()));
+Cell _c = cells.get(0);
+Assert.assertEquals("version1",
+    Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
 scanner.close();
 return table;
 }
@@ -270,11 +282,16 @@ public class TestScannerWithBulkload {
 while (result != null) {
 List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q"));
 for (Cell _c : cells) {
-if (Bytes.toString(_c.getRow()).equals("row1")) {
-System.out.println(Bytes.toString(_c.getRow()));
-System.out.println(Bytes.toString(_c.getQualifier()));
-System.out.println(Bytes.toString(_c.getValue()));
-Assert.assertEquals("version3", Bytes.toString(_c.getValue()));
+if (Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength())
+    .equals("row1")) {
+System.out
+    .println(Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength()));
+System.out.println(Bytes.toString(_c.getQualifierArray(), _c.getQualifierOffset(),
+    _c.getQualifierLength()));
+System.out.println(
+    Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
+Assert.assertEquals("version3",
+    Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
 }
 }
 result = scanner.next();
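The same idea carries over to String decoding: Bytes.toString(byte[], int, int) reads straight from the cell's backing array, so no intermediate byte[] copy is made. A condensed sketch of the replacement pattern used throughout this file, with _c standing for any Cell as in the hunks above:

    // Copy-free decodes replacing the removed _c.getRow()/getQualifier()/getValue():
    String row = Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength());
    String qualifier = Bytes.toString(_c.getQualifierArray(), _c.getQualifierOffset(), _c.getQualifierLength());
    String value = Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength());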

View File

@@ -34,14 +34,13 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.testclassification.RegionServerTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.HFileLink;
@@ -54,6 +53,8 @@ import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.BloomFilterFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
@@ -171,9 +172,9 @@ public class TestStoreFile extends HBaseTestCase {
 // may be in middle of row. Create new one with empty column and
 // timestamp.
 Cell kv = reader.midkey();
-byte [] midRow = kv.getRow();
+byte [] midRow = CellUtil.cloneRow(kv);
 kv = reader.getLastKey();
-byte [] finalRow = kv.getRow();
+byte [] finalRow = CellUtil.cloneRow(kv);
 // Make a reference
 HRegionInfo splitHri = new HRegionInfo(hri.getTable(), null, midRow);
 Path refPath = splitStoreFile(regionFs, splitHri, TEST_FAMILY, hsf, midRow, true);
@@ -186,11 +187,13 @@ public class TestStoreFile extends HBaseTestCase {
 ByteBuffer bb = ByteBuffer.wrap(((KeyValue) s.getKey()).getKey());
 kv = KeyValueUtil.createKeyValueFromKey(bb);
 if (first) {
-assertTrue(Bytes.equals(kv.getRow(), midRow));
+assertTrue(Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), midRow, 0,
+    midRow.length));
 first = false;
 }
 }
-assertTrue(Bytes.equals(kv.getRow(), finalRow));
+assertTrue(Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), finalRow, 0,
+    finalRow.length));
 }
 @Test
@@ -316,7 +319,7 @@ public class TestStoreFile extends HBaseTestCase {
 throws IOException {
 Cell midkey = f.createReader().midkey();
 KeyValue midKV = (KeyValue)midkey;
-byte [] midRow = midKV.getRow();
+byte [] midRow = CellUtil.cloneRow(midKV);
 // Create top split.
 HRegionInfo topHri = new HRegionInfo(regionFs.getRegionInfo().getTable(),
 null, midRow);
@@ -402,7 +405,8 @@ public class TestStoreFile extends HBaseTestCase {
 first = false;
 KeyValue keyKV = KeyValueUtil.createKeyValueFromKey(key);
 LOG.info("First top when key < bottom: " + keyKV);
-String tmp = Bytes.toString(keyKV.getRow());
+String tmp =
+    Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength());
 for (int i = 0; i < tmp.length(); i++) {
 assertTrue(tmp.charAt(i) == 'a');
 }
@@ -410,7 +414,7 @@ public class TestStoreFile extends HBaseTestCase {
 }
 KeyValue keyKV = KeyValueUtil.createKeyValueFromKey(key);
 LOG.info("Last top when key < bottom: " + keyKV);
-String tmp = Bytes.toString(keyKV.getRow());
+String tmp = Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength());
 for (int i = 0; i < tmp.length(); i++) {
 assertTrue(tmp.charAt(i) == 'z');
 }
@@ -434,7 +438,7 @@ public class TestStoreFile extends HBaseTestCase {
 first = false;
 keyKV = KeyValueUtil.createKeyValueFromKey(key);
 LOG.info("First bottom when key > top: " + keyKV);
-tmp = Bytes.toString(keyKV.getRow());
+tmp = Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength());
 for (int i = 0; i < tmp.length(); i++) {
 assertTrue(tmp.charAt(i) == 'a');
 }
@@ -443,7 +447,8 @@ public class TestStoreFile extends HBaseTestCase {
 keyKV = KeyValueUtil.createKeyValueFromKey(key);
 LOG.info("Last bottom when key > top: " + keyKV);
 for (int i = 0; i < tmp.length(); i++) {
-assertTrue(Bytes.toString(keyKV.getRow()).charAt(i) == 'z');
+assertTrue(Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength())
+    .charAt(i) == 'z');
 }
 } finally {
 if (top != null) {

View File

@@ -43,6 +43,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -175,7 +176,7 @@ public class TestStripeCompactor {
 List<byte[]> boundaries = new ArrayList<byte[]>();
 boundaries.add(left);
 for (int i = 1; i < output.length; ++i) {
-boundaries.add(output[i][0].getRow());
+boundaries.add(CellUtil.cloneRow(output[i][0]));
 }
 boundaries.add(right);
 writers.verifyBoundaries(boundaries.toArray(new byte[][] {}));

View File

@@ -34,14 +34,11 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
@@ -56,6 +53,8 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -568,15 +567,16 @@ public class TestTags {
 for (Cell cell : edits) {
 KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
 if (cf == null) {
-cf = kv.getFamily();
+cf = CellUtil.cloneFamily(kv);
 }
 Tag tag = new Tag((byte) 1, attribute);
 List<Tag> tagList = new ArrayList<Tag>();
 tagList.add(tag);
-KeyValue newKV = new KeyValue(kv.getRow(), 0, kv.getRowLength(), kv.getFamily(), 0,
-    kv.getFamilyLength(), kv.getQualifier(), 0, kv.getQualifierLength(),
-    kv.getTimestamp(), KeyValue.Type.codeToType(kv.getType()), kv.getValue(), 0,
+KeyValue newKV = new KeyValue(CellUtil.cloneRow(kv), 0, kv.getRowLength(),
+    CellUtil.cloneFamily(kv), 0, kv.getFamilyLength(), CellUtil.cloneQualifier(kv), 0,
+    kv.getQualifierLength(), kv.getTimestamp(),
+    KeyValue.Type.codeToType(kv.getTypeByte()), CellUtil.cloneValue(kv), 0,
 kv.getValueLength(), tagList);
 ((List<Cell>) updatedCells).add(newKV);
 }
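For the tag-rewriting observers in this and the replication test below, the rebuilt KeyValue now draws every component from a CellUtil clone, and the type byte comes from getTypeByte() rather than the deprecated KeyValue#getType(). The full constructor call, condensed from the hunk above (tagList as defined there):

    KeyValue newKV = new KeyValue(
        CellUtil.cloneRow(kv), 0, kv.getRowLength(),
        CellUtil.cloneFamily(kv), 0, kv.getFamilyLength(),
        CellUtil.cloneQualifier(kv), 0, kv.getQualifierLength(),
        kv.getTimestamp(), KeyValue.Type.codeToType(kv.getTypeByte()),
        CellUtil.cloneValue(kv), 0, kv.getValueLength(), tagList);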

View File

@@ -510,7 +510,8 @@ public class TestLogRolling {
 while ((entry = reader.next()) != null) {
 LOG.debug("#"+entry.getKey().getLogSeqNum()+": "+entry.getEdit().getCells());
 for (Cell cell : entry.getEdit().getCells()) {
-loggedRows.add(Bytes.toStringBinary(cell.getRow()));
+loggedRows.add(Bytes.toStringBinary(cell.getRowArray(), cell.getRowOffset(),
+    cell.getRowLength()));
 }
 }
 } catch (EOFException e) {

View File

@@ -26,12 +26,11 @@ import static org.junit.Assert.assertTrue;
 import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -62,8 +61,6 @@ import org.junit.rules.TestName;
 */
 @Category({RegionServerTests.class, MediumTests.class})
 public class TestProtobufLog {
-private static final Log LOG = LogFactory.getLog(TestProtobufLog.class);
 protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
 protected FileSystem fs;
@@ -189,9 +186,10 @@ public class TestProtobufLog {
 assertEquals(tableName, entry.getKey().getTablename());
 int idx = 0;
 for (Cell val : entry.getEdit().getCells()) {
-assertTrue(Bytes.equals(row, val.getRow()));
+assertTrue(Bytes.equals(row, 0, row.length, val.getRowArray(), val.getRowOffset(),
+    val.getRowLength()));
 String value = i + "" + idx;
-assertArrayEquals(Bytes.toBytes(value), val.getValue());
+assertArrayEquals(Bytes.toBytes(value), CellUtil.cloneValue(val));
 idx++;
 }
 }

View File

@@ -29,6 +29,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -43,8 +44,6 @@ import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Table;
@@ -208,15 +207,16 @@ public class TestReplicationWithTags {
 for (Cell cell : edits) {
 KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
 if (cf == null) {
-cf = kv.getFamily();
+cf = CellUtil.cloneFamily(kv);
 }
 Tag tag = new Tag(TAG_TYPE, attribute);
 List<Tag> tagList = new ArrayList<Tag>();
 tagList.add(tag);
-KeyValue newKV = new KeyValue(kv.getRow(), 0, kv.getRowLength(), kv.getFamily(), 0,
-    kv.getFamilyLength(), kv.getQualifier(), 0, kv.getQualifierLength(),
-    kv.getTimestamp(), KeyValue.Type.codeToType(kv.getType()), kv.getValue(), 0,
+KeyValue newKV = new KeyValue(CellUtil.cloneRow(kv), 0, kv.getRowLength(),
+    CellUtil.cloneFamily(kv), 0, kv.getFamilyLength(), CellUtil.cloneQualifier(kv), 0,
+    kv.getQualifierLength(), kv.getTimestamp(),
+    KeyValue.Type.codeToType(kv.getTypeByte()), CellUtil.cloneValue(kv), 0,
 kv.getValueLength(), tagList);
 ((List<Cell>) updatedCells).add(newKV);
 }

View File

@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.replication.regionserver;
 import static org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster.closeRegion;
 import static org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster.openRegion;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -28,9 +28,9 @@ import java.util.Queue;
 import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -39,7 +39,6 @@ import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory;
 import org.apache.hadoop.hbase.client.Table;
@@ -49,12 +48,9 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.WALCoprocessorEnvironment;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse;
-import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster;
-import org.apache.hadoop.hbase.wal.WAL.Entry;
-import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
 import org.apache.hadoop.hbase.replication.ReplicationEndpoint.ReplicateContext;
@@ -65,6 +61,8 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.ReplicationTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
+import org.apache.hadoop.hbase.wal.WAL.Entry;
+import org.apache.hadoop.hbase.wal.WALKey;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -82,9 +80,6 @@ import com.google.common.collect.Lists;
 @Category({ReplicationTests.class, MediumTests.class})
 public class TestRegionReplicaReplicationEndpointNoMaster {
-private static final Log LOG = LogFactory.getLog(
-    TestRegionReplicaReplicationEndpointNoMaster.class);
 private static final int NB_SERVERS = 2;
 private static TableName tableName = TableName.valueOf(
 TestRegionReplicaReplicationEndpointNoMaster.class.getSimpleName());
@@ -193,7 +188,7 @@ public class TestRegionReplicaReplicationEndpointNoMaster {
 throws IOException, RuntimeException {
 Entry entry;
 while ((entry = entries.poll()) != null) {
-byte[] row = entry.getEdit().getCells().get(0).getRow();
+byte[] row = CellUtil.cloneRow(entry.getEdit().getCells().get(0));
 RegionLocations locations = connection.locateRegion(tableName, row, true, true);
 RegionReplicaReplayCallable callable = new RegionReplicaReplayCallable(connection,
 RpcControllerFactory.instantiate(connection.getConfiguration()),
@@ -298,7 +293,9 @@ public class TestRegionReplicaReplicationEndpointNoMaster {
 Assert.assertEquals(1000, entries.size());
 for (Entry e: entries) {
-if (Integer.parseInt(Bytes.toString(e.getEdit().getCells().get(0).getValue())) % 2 == 0) {
+Cell _c = e.getEdit().getCells().get(0);
+if (Integer.parseInt(
+    Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength())) % 2 == 0) {
 e.getKey().setOrigLogSeqNum(1); // simulate dist log replay by setting orig seq id
 }
 }

View File

@@ -137,20 +137,26 @@ public class TestDefaultScanLabelGeneratorStack {
 Cell current = cellScanner.current();
 assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
 current.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current.getQualifier(), Q1));
-assertTrue(Bytes.equals(current.getValue(), value1));
+assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+    current.getQualifierLength(), Q1, 0, Q1.length));
+assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+    current.getValueLength(), value1, 0, value1.length));
 cellScanner.advance();
 current = cellScanner.current();
 assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
 current.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current.getQualifier(), Q2));
-assertTrue(Bytes.equals(current.getValue(), value2));
+assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+    current.getQualifierLength(), Q2, 0, Q2.length));
+assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+    current.getValueLength(), value2, 0, value2.length));
 cellScanner.advance();
 current = cellScanner.current();
 assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
 current.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current.getQualifier(), Q3));
-assertTrue(Bytes.equals(current.getValue(), value3));
+assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+    current.getQualifierLength(), Q3, 0, Q3.length));
+assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+    current.getValueLength(), value3, 0, value3.length));
 return null;
 }
@@ -173,15 +179,19 @@ public class TestDefaultScanLabelGeneratorStack {
 // test user can see value2 (CONFIDENTIAL) and value3 (no label)
 assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
 current.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current.getQualifier(), Q2));
-assertTrue(Bytes.equals(current.getValue(), value2));
+assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+    current.getQualifierLength(), Q2, 0, Q2.length));
+assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+    current.getValueLength(), value2, 0, value2.length));
 cellScanner.advance();
 current = cellScanner.current();
 // test user can see value2 (CONFIDENTIAL) and value3 (no label)
 assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
 current.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current.getQualifier(), Q3));
-assertTrue(Bytes.equals(current.getValue(), value3));
+assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+    current.getQualifierLength(), Q3, 0, Q3.length));
+assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+    current.getValueLength(), value3, 0, value3.length));
 // Test scan with correct auth attribute for test user
 Scan s1 = new Scan();
@@ -198,15 +208,19 @@ public class TestDefaultScanLabelGeneratorStack {
 // test user can see value2 (CONFIDENTIAL) and value3 (no label)
 assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
 current1.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current1.getQualifier(), Q2));
-assertTrue(Bytes.equals(current1.getValue(), value2));
+assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
+    current1.getQualifierLength(), Q2, 0, Q2.length));
+assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
+    current1.getValueLength(), value2, 0, value2.length));
 cellScanner1.advance();
 current1 = cellScanner1.current();
 // test user can see value2 (CONFIDENTIAL) and value3 (no label)
 assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
 current1.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current1.getQualifier(), Q3));
-assertTrue(Bytes.equals(current1.getValue(), value3));
+assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
+    current1.getQualifierLength(), Q3, 0, Q3.length));
+assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
+    current1.getValueLength(), value3, 0, value3.length));
 // Test scan with incorrect auth attribute for test user
 Scan s2 = new Scan();
@@ -221,8 +235,10 @@ public class TestDefaultScanLabelGeneratorStack {
 // This scan will only see value3 (no label)
 assertTrue(Bytes.equals(current2.getRowArray(), current2.getRowOffset(),
 current2.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current2.getQualifier(), Q3));
-assertTrue(Bytes.equals(current2.getValue(), value3));
+assertTrue(Bytes.equals(current2.getQualifierArray(), current2.getQualifierOffset(),
+    current2.getQualifierLength(), Q3, 0, Q3.length));
+assertTrue(Bytes.equals(current2.getValueArray(), current2.getValueOffset(),
+    current2.getValueLength(), value3, 0, value3.length));
 assertFalse(cellScanner2.advance());

View File

@@ -50,7 +50,6 @@ import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -406,7 +405,7 @@ public class TestVisibilityLabelsReplication {
 for (Cell cell : edits) {
 KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
 if (cf == null) {
-cf = kv.getFamily();
+cf = CellUtil.cloneFamily(kv);
 }
 Tag tag = new Tag((byte) NON_VIS_TAG_TYPE, attribute);
 List<Tag> tagList = new ArrayList<Tag>();
@@ -414,10 +413,6 @@ public class TestVisibilityLabelsReplication {
 tagList.addAll(kv.getTags());
 byte[] fromList = Tag.fromList(tagList);
 TagRewriteCell newcell = new TagRewriteCell(kv, fromList);
-KeyValue newKV = new KeyValue(kv.getRow(), 0, kv.getRowLength(), kv.getFamily(), 0,
-    kv.getFamilyLength(), kv.getQualifier(), 0, kv.getQualifierLength(),
-    kv.getTimestamp(), KeyValue.Type.codeToType(kv.getType()), kv.getValue(), 0,
-    kv.getValueLength(), tagList);
 ((List<Cell>) updatedCells).add(newcell);
 }
 }

View File

@@ -151,20 +151,26 @@ public class TestVisibilityLablesWithGroups {
 Cell current = cellScanner.current();
 assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
 current.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current.getQualifier(), Q1));
-assertTrue(Bytes.equals(current.getValue(), value1));
+assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+    current.getQualifierLength(), Q1, 0, Q1.length));
+assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+    current.getValueLength(), value1, 0, value1.length));
 cellScanner.advance();
 current = cellScanner.current();
 assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
 current.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current.getQualifier(), Q2));
-assertTrue(Bytes.equals(current.getValue(), value2));
+assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+    current.getQualifierLength(), Q2, 0, Q2.length));
+assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+    current.getValueLength(), value2, 0, value2.length));
 cellScanner.advance();
 current = cellScanner.current();
 assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
 current.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current.getQualifier(), Q3));
-assertTrue(Bytes.equals(current.getValue(), value3));
+assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+    current.getQualifierLength(), Q3, 0, Q3.length));
+assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+    current.getValueLength(), value3, 0, value3.length));
 }
 return null;
 }
@@ -206,15 +212,19 @@ public class TestVisibilityLablesWithGroups {
 // test user can see value2 (CONFIDENTIAL) and value3 (no label)
 assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
 current.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current.getQualifier(), Q2));
-assertTrue(Bytes.equals(current.getValue(), value2));
+assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+    current.getQualifierLength(), Q2, 0, Q2.length));
+assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+    current.getValueLength(), value2, 0, value2.length));
 cellScanner.advance();
 current = cellScanner.current();
 // test user can see value2 (CONFIDENTIAL) and value3 (no label)
 assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
 current.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current.getQualifier(), Q3));
-assertTrue(Bytes.equals(current.getValue(), value3));
+assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
+    current.getQualifierLength(), Q3, 0, Q3.length));
+assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
+    current.getValueLength(), value3, 0, value3.length));
 // Test scan with correct auth attribute for test user
 Scan s1 = new Scan();
@@ -231,15 +241,19 @@ public class TestVisibilityLablesWithGroups {
 // test user can see value2 (CONFIDENTIAL) and value3 (no label)
 assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
 current1.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current1.getQualifier(), Q2));
-assertTrue(Bytes.equals(current1.getValue(), value2));
+assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
+    current1.getQualifierLength(), Q2, 0, Q2.length));
+assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
+    current1.getValueLength(), value2, 0, value2.length));
 cellScanner1.advance();
 current1 = cellScanner1.current();
 // test user can see value2 (CONFIDENTIAL) and value3 (no label)
 assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
 current1.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current1.getQualifier(), Q3));
-assertTrue(Bytes.equals(current1.getValue(), value3));
+assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
+    current1.getQualifierLength(), Q3, 0, Q3.length));
+assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
+    current1.getValueLength(), value3, 0, value3.length));
 // Test scan with incorrect auth attribute for test user
 Scan s2 = new Scan();
@@ -254,8 +268,10 @@ public class TestVisibilityLablesWithGroups {
 // This scan will only see value3 (no label)
 assertTrue(Bytes.equals(current2.getRowArray(), current2.getRowOffset(),
 current2.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current2.getQualifier(), Q3));
-assertTrue(Bytes.equals(current2.getValue(), value3));
+assertTrue(Bytes.equals(current2.getQualifierArray(), current2.getQualifierOffset(),
+    current2.getQualifierLength(), Q3, 0, Q3.length));
+assertTrue(Bytes.equals(current2.getValueArray(), current2.getValueOffset(),
+    current2.getValueLength(), value3, 0, value3.length));
 assertFalse(cellScanner2.advance());
 }
@@ -315,8 +331,10 @@ public class TestVisibilityLablesWithGroups {
 // test user can only see value3 (no label)
 assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
 current1.getRowLength(), ROW_1, 0, ROW_1.length));
-assertTrue(Bytes.equals(current1.getQualifier(), Q3));
-assertTrue(Bytes.equals(current1.getValue(), value3));
+assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
+    current1.getQualifierLength(), Q3, 0, Q3.length));
+assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
+    current1.getValueLength(), value3, 0, value3.length));
 assertFalse(cellScanner1.advance());
 }

View File

@@ -29,24 +29,22 @@ import java.util.List;
 import java.util.Map;
 import java.util.NavigableSet;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.IsolationLevel;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@@ -55,9 +53,9 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
+import org.apache.hadoop.hbase.regionserver.ScanInfo;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.Store;
-import org.apache.hadoop.hbase.regionserver.ScanInfo;
 import org.apache.hadoop.hbase.regionserver.StoreScanner;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -66,7 +64,6 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
@@ -74,7 +71,6 @@ import org.junit.runners.Parameterized.Parameters;
 @Category({MiscTests.class, MediumTests.class})
 @RunWith(Parameterized.class)
 public class TestCoprocessorScanPolicy {
-  private static final Log LOG = LogFactory.getLog(TestCoprocessorScanPolicy.class);
   protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private static final byte[] F = Bytes.toBytes("fam");
   private static final byte[] Q = Bytes.toBytes("qual");
@@ -229,12 +225,16 @@ public class TestCoprocessorScanPolicy {
       if (put.getAttribute("ttl") != null) {
         Cell cell = put.getFamilyCellMap().values().iterator().next().get(0);
         KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-        ttls.put(TableName.valueOf(kv.getQualifier()), Bytes.toLong(kv.getValue()));
+        ttls.put(TableName.valueOf(
+          Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength())),
+          Bytes.toLong(CellUtil.cloneValue(kv)));
         c.bypass();
       } else if (put.getAttribute("versions") != null) {
         Cell cell = put.getFamilyCellMap().values().iterator().next().get(0);
         KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-        versions.put(TableName.valueOf(kv.getQualifier()), Bytes.toInt(kv.getValue()));
+        versions.put(TableName.valueOf(
+          Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength())),
+          Bytes.toInt(CellUtil.cloneValue(kv)));
         c.bypass();
       }
     }
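Two replacement idioms appear in this hunk: the qualifier is decoded in place via Bytes.toString(byte[], int, int), while the value goes through CellUtil.cloneValue(Cell), the supported helper for code that genuinely needs a byte[] copy. A sketch of both, assuming a qualifier that encodes a table name and a long-encoded value (the class and method names are ours):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.util.Bytes;

    final class AttributeCellDecoder {
      // Decode the qualifier in place; no intermediate byte[] is allocated.
      static TableName tableOf(Cell cell) {
        return TableName.valueOf(Bytes.toString(cell.getQualifierArray(),
            cell.getQualifierOffset(), cell.getQualifierLength()));
      }

      // CellUtil.cloneValue still copies, but through the supported API
      // rather than the removed Cell#getValue().
      static long longValueOf(Cell cell) {
        return Bytes.toLong(CellUtil.cloneValue(cell));
      }
    }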

View File

@@ -39,19 +39,16 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
+import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
+import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.log4j.Level;
-// imports for things that haven't moved from regionserver.wal yet.
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
-import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -123,12 +120,12 @@ public class TestSecureWAL {
       List<Cell> cells = entry.getEdit().getCells();
       assertTrue("Should be one KV per WALEdit", cells.size() == 1);
       for (Cell cell: cells) {
-        byte[] thisRow = cell.getRow();
-        assertTrue("Incorrect row", Bytes.equals(thisRow, row));
-        byte[] thisFamily = cell.getFamily();
-        assertTrue("Incorrect family", Bytes.equals(thisFamily, family));
-        byte[] thisValue = cell.getValue();
-        assertTrue("Incorrect value", Bytes.equals(thisValue, value));
+        assertTrue("Incorrect row", Bytes.equals(cell.getRowArray(), cell.getRowOffset(),
+          cell.getRowLength(), row, 0, row.length));
+        assertTrue("Incorrect family", Bytes.equals(cell.getFamilyArray(), cell.getFamilyOffset(),
+          cell.getFamilyLength(), family, 0, family.length));
+        assertTrue("Incorrect value", Bytes.equals(cell.getValueArray(), cell.getValueOffset(),
+          cell.getValueLength(), value, 0, value.length));
       }
     }
     assertEquals("Should have read back as many KVs as written", total, count);

View File

@@ -41,6 +41,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -51,6 +52,12 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.SampleRegionWALObserver;
+import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
+import org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogReader;
+import org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogWriter;
+import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
+import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -68,14 +75,6 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
-// imports for things that haven't moved from regionserver.wal yet.
-import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
-import org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogReader;
-import org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogWriter;
-import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
-import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 /**
  * WAL tests that can be reused across providers.
  */
@@ -521,8 +520,9 @@ public class TestWALFactory {
       assertTrue(Bytes.equals(info.getEncodedNameAsBytes(), key.getEncodedRegionName()));
       assertTrue(htd.getTableName().equals(key.getTablename()));
       Cell cell = val.getCells().get(0);
-      assertTrue(Bytes.equals(row, cell.getRow()));
-      assertEquals((byte)(i + '0'), cell.getValue()[0]);
+      assertTrue(Bytes.equals(row, 0, row.length, cell.getRowArray(), cell.getRowOffset(),
+        cell.getRowLength()));
+      assertEquals((byte)(i + '0'), CellUtil.cloneValue(cell)[0]);
       System.out.println(key + " " + val);
     }
   } finally {
@@ -574,8 +574,9 @@ public class TestWALFactory {
       assertTrue(Bytes.equals(hri.getEncodedNameAsBytes(),
         entry.getKey().getEncodedRegionName()));
       assertTrue(htd.getTableName().equals(entry.getKey().getTablename()));
-      assertTrue(Bytes.equals(row, val.getRow()));
-      assertEquals((byte)(idx + '0'), val.getValue()[0]);
+      assertTrue(Bytes.equals(row, 0, row.length, val.getRowArray(), val.getRowOffset(),
+        val.getRowLength()));
+      assertEquals((byte) (idx + '0'), CellUtil.cloneValue(val)[0]);
       System.out.println(entry.getKey() + " " + val);
       idx++;
     }
@@ -687,9 +688,10 @@ public class TestWALFactory {
       assertEquals(tableName, entry.getKey().getTablename());
       int idx = 0;
       for (Cell val : entry.getEdit().getCells()) {
-        assertTrue(Bytes.equals(row, val.getRow()));
+        assertTrue(Bytes.equals(row, 0, row.length, val.getRowArray(), val.getRowOffset(),
+          val.getRowLength()));
         String value = i + "" + idx;
-        assertArrayEquals(Bytes.toBytes(value), CellUtil.cloneValue(val));
+        assertArrayEquals(Bytes.toBytes(value), CellUtil.cloneValue(val));
         idx++;
       }
     }
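Note that CellUtil.cloneValue(cell)[0] copies the whole value just to inspect a single byte. Indexing into the backing array is the allocation-free equivalent; a sketch (assumes an on-heap, array-backed cell, and the helper name is ours):

    import org.apache.hadoop.hbase.Cell;

    final class FirstValueByte {
      // Reads the first byte of the value without materializing a copy.
      static byte of(Cell cell) {
        return cell.getValueArray()[cell.getValueOffset()];
      }
    }

In a test that iterates a handful of entries the clone is harmless, which is presumably why the simpler form was chosen here.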

View File

@@ -41,13 +41,6 @@ import java.util.concurrent.atomic.AtomicLong;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.hadoop.hbase.testclassification.RegionServerTests;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.log4j.Level;
-import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
-import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -62,18 +55,24 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
 import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.wal.WAL.Entry;
-import org.apache.hadoop.hbase.wal.WAL.Reader;
-import org.apache.hadoop.hbase.wal.WALProvider.Writer;
-import org.apache.hadoop.hbase.wal.WALSplitter.CorruptedLogFileException;
+import org.apache.hadoop.hbase.regionserver.wal.FaultySequenceFileLogReader;
+import org.apache.hadoop.hbase.regionserver.wal.InstrumentedLogWriter;
+import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
+import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CancelableProgressable;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Threads;
+import org.apache.hadoop.hbase.wal.WAL.Entry;
+import org.apache.hadoop.hbase.wal.WAL.Reader;
+import org.apache.hadoop.hbase.wal.WALProvider.Writer;
+import org.apache.hadoop.hbase.wal.WALSplitter.CorruptedLogFileException;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
 import org.apache.hadoop.ipc.RemoteException;
@@ -82,9 +81,9 @@ import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
-import org.junit.rules.TestName;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
 import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
@@ -92,12 +91,6 @@ import org.mockito.stubbing.Answer;
 import com.google.common.base.Joiner;
 import com.google.common.collect.ImmutableList;
-// imports for things that haven't moved from regionserver.wal yet.
-import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-import org.apache.hadoop.hbase.regionserver.wal.InstrumentedLogWriter;
-import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
-import org.apache.hadoop.hbase.regionserver.wal.FaultySequenceFileLogReader;
 /**
  * Testing {@link WAL} splitting code.
  */
@@ -957,7 +950,8 @@ public class TestWALSplit {
       Cell cell = cells.get(0);
       // Check that the edits come in the right order.
-      assertEquals(expectedIndex, Bytes.toInt(cell.getRow()));
+      assertEquals(expectedIndex, Bytes.toInt(cell.getRowArray(), cell.getRowOffset(),
+        cell.getRowLength()));
       expectedIndex++;
       return null;
     }
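Bytes.toInt(byte[], int, int) decodes the row key directly from the cell's backing array, so the ordering check no longer allocates a row copy per edit. The same pattern, isolated (the class and method names are illustrative):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.util.Bytes;

    final class RowOrdering {
      // Decode an int row key in place, replacing the removed
      // cell.getRow() copy.
      static int rowAsInt(Cell cell) {
        return Bytes.toInt(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
      }
    }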

View File

@@ -224,7 +224,8 @@ EOF
       # Fetch cell value
       cell = result.listCells[0]
-      org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValue)
+      org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValueArray,
+        cell.getValueOffset, cell.getValueLength)
     end

     #----------------------------------------------------------------------------------------------
@@ -371,8 +372,10 @@ EOF
       # Print out results.  Result can be Cell or RowResult.
       res = {}
       result.listCells.each do |c|
-        family = String.from_java_bytes(c.getFamily)
-        qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifier)
+        family = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getFamilyArray,
+          c.getFamilyOffset, c.getFamilyLength)
+        qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifierArray,
+          c.getQualifierOffset, c.getQualifierLength)
         column = "#{family}:#{qualifier}"
         value = to_string(column, c, maxlength)
@@ -403,7 +406,8 @@ EOF
       # Fetch cell value
       cell = result.listCells[0]
-      org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValue)
+      org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValueArray,
+        cell.getValueOffset, cell.getValueLength)
     end

     def _hash_to_scan(args)
@@ -505,8 +509,10 @@ EOF
       key = org.apache.hadoop.hbase.util.Bytes::toStringBinary(row.getRow)
       row.listCells.each do |c|
-        family = String.from_java_bytes(c.getFamily)
-        qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifier)
+        family = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getFamilyArray,
+          c.getFamilyOffset, c.getFamilyLength)
+        qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifierArray,
+          c.getQualifierOffset, c.getQualifierLength)
         column = "#{family}:#{qualifier}"
         cell = to_string(column, c, maxlength)
@@ -640,14 +646,17 @@ EOF
     def to_string(column, kv, maxlength = -1)
       if is_meta_table?
         if column == 'info:regioninfo' or column == 'info:splitA' or column == 'info:splitB'
-          hri = org.apache.hadoop.hbase.HRegionInfo.parseFromOrNull(kv.getValue)
+          hri = org.apache.hadoop.hbase.HRegionInfo.parseFromOrNull(kv.getValueArray,
+            kv.getValueOffset, kv.getValueLength)
           return "timestamp=%d, value=%s" % [kv.getTimestamp, hri.toString]
         end
         if column == 'info:serverstartcode'
           if kv.getValue.length > 0
-            str_val = org.apache.hadoop.hbase.util.Bytes.toLong(kv.getValue)
+            str_val = org.apache.hadoop.hbase.util.Bytes.toLong(kv.getValueArray,
+              kv.getValueOffset, kv.getValueLength)
           else
-            str_val = org.apache.hadoop.hbase.util.Bytes.toStringBinary(kv.getValue)
+            str_val = org.apache.hadoop.hbase.util.Bytes.toStringBinary(kv.getValueArray,
+              kv.getValueOffset, kv.getValueLength)
           end
           return "timestamp=%d, value=%s" % [kv.getTimestamp, str_val]
         end
@@ -679,7 +688,7 @@ EOF
         end
       end
       method = eval(klazz_name).method(converter)
-      return method.call(kv.getValue) # apply the converter
+      return method.call(org.apache.hadoop.hbase.CellUtil.cloneValue(kv)) # apply the converter
     end

     # if the column spec contains CONVERTER information, to get rid of :CONVERTER info from column pair.
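The shell is JRuby over the same Java API, so each rewrite above maps one-to-one onto a Java call. A Java rendering of the serverstartcode branch, for reference (a sketch; the class and method names are ours):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.util.Bytes;

    final class MetaValueFormatter {
      // Mirrors the shell's to_string handling of info:serverstartcode:
      // decode the value in place as a long when present, otherwise render
      // it as a printable string.
      static String format(Cell kv) {
        String strVal = kv.getValueLength() > 0
            ? String.valueOf(Bytes.toLong(kv.getValueArray(), kv.getValueOffset(),
                kv.getValueLength()))
            : Bytes.toStringBinary(kv.getValueArray(), kv.getValueOffset(),
                kv.getValueLength());
        return String.format("timestamp=%d, value=%s", kv.getTimestamp(), strVal);
      }
    }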