HBASE-14047 - Cleanup deprecated APIs from Cell class (Ashish Singhi)

This commit is contained in:
ramkrishna 2015-07-13 22:06:16 +05:30
parent 951ec7a0b7
commit a3d30892b4
76 changed files with 752 additions and 1047 deletions

View File

@@ -138,12 +138,6 @@ public class TestPayloadCarryingRpcController {
return 0; return 0;
} }
@Override
public long getMvccVersion() {
// unused
return 0;
}
@Override @Override
public long getSequenceId() { public long getSequenceId() {
// unused // unused
@@ -182,30 +176,6 @@ public class TestPayloadCarryingRpcController {
// unused // unused
return null; return null;
} }
@Override
public byte[] getValue() {
// unused
return null;
}
@Override
public byte[] getFamily() {
// unused
return null;
}
@Override
public byte[] getQualifier() {
// unused
return null;
}
@Override
public byte[] getRow() {
// unused
return null;
}
}; };
} }

View File

@@ -44,12 +44,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* the goal of sorting newer cells first. * the goal of sorting newer cells first.
* </p> * </p>
* <p> * <p>
* This interface should not include methods that allocate new byte[]'s such as those used in client
* or debugging code. These users should use the methods found in the {@link CellUtil} class.
* Currently for to minimize the impact of existing applications moving between 0.94 and 0.96, we
* include the costly helper methods marked as deprecated.
* </p>
* <p>
* Cell implements Comparable&lt;Cell&gt; which is only meaningful when * Cell implements Comparable&lt;Cell&gt; which is only meaningful when
* comparing to other keys in the * comparing to other keys in the
* same table. It uses CellComparator which does not work on the -ROOT- and hbase:meta tables. * same table. It uses CellComparator which does not work on the -ROOT- and hbase:meta tables.
@@ -146,19 +140,7 @@ public interface Cell {
byte getTypeByte(); byte getTypeByte();
//6) MvccVersion //6) SequenceId
/**
* @deprecated as of 1.0, use {@link Cell#getSequenceId()}
*
* Internal use only. A region-specific sequence ID given to each operation. It always exists for
* cells in the memstore but is not retained forever. It may survive several flushes, but
* generally becomes irrelevant after the cell's row is no longer involved in any operations that
* require strict consistency.
* @return mvccVersion (always &gt;= 0 if exists), or 0 if it no longer exists
*/
@Deprecated
long getMvccVersion();
/** /**
* A region-specific unique monotonically increasing sequence ID given to each Cell. It always * A region-specific unique monotonically increasing sequence ID given to each Cell. It always
@@ -187,7 +169,7 @@ public interface Cell {
* @return Number of value bytes. Must be &lt; valueArray.length - offset. * @return Number of value bytes. Must be &lt; valueArray.length - offset.
*/ */
int getValueLength(); int getValueLength();
/** /**
* @return the tags byte array * @return the tags byte array
*/ */
@@ -202,44 +184,4 @@ public interface Cell {
* @return the total length of the tags in the Cell. * @return the total length of the tags in the Cell.
*/ */
int getTagsLength(); int getTagsLength();
/**
* WARNING do not use, expensive. This gets an arraycopy of the cell's value.
*
* Added to ease transition from 0.94 -&gt; 0.96.
*
* @deprecated as of 0.96, use {@link CellUtil#cloneValue(Cell)}
*/
@Deprecated
byte[] getValue();
/**
* WARNING do not use, expensive. This gets an arraycopy of the cell's family.
*
* Added to ease transition from 0.94 -&gt; 0.96.
*
* @deprecated as of 0.96, use {@link CellUtil#cloneFamily(Cell)}
*/
@Deprecated
byte[] getFamily();
/**
* WARNING do not use, expensive. This gets an arraycopy of the cell's qualifier.
*
* Added to ease transition from 0.94 -&gt; 0.96.
*
* @deprecated as of 0.96, use {@link CellUtil#cloneQualifier(Cell)}
*/
@Deprecated
byte[] getQualifier();
/**
* WARNING do not use, expensive. this gets an arraycopy of the cell's row.
*
* Added to ease transition from 0.94 -&gt; 0.96.
*
* @deprecated as of 0.96, use {@link CellUtil#getRowByte(Cell, int)}
*/
@Deprecated
byte[] getRow();
} }

View File

@@ -1335,11 +1335,6 @@ public final class CellUtil {
return 0; return 0;
} }
@Override
public long getMvccVersion() {
return getSequenceId();
}
@Override @Override
public long getSequenceId() { public long getSequenceId() {
return 0; return 0;
@@ -1374,26 +1369,6 @@ public final class CellUtil {
public int getTagsLength() { public int getTagsLength() {
return 0; return 0;
} }
@Override
public byte[] getValue() {
return EMPTY_BYTE_ARRAY;
}
@Override
public byte[] getFamily() {
return EMPTY_BYTE_ARRAY;
}
@Override
public byte[] getQualifier() {
return EMPTY_BYTE_ARRAY;
}
@Override
public byte[] getRow() {
return EMPTY_BYTE_ARRAY;
}
} }
@InterfaceAudience.Private @InterfaceAudience.Private
@@ -1432,11 +1407,6 @@ public final class CellUtil {
public byte getTypeByte() { public byte getTypeByte() {
return Type.Maximum.getCode(); return Type.Maximum.getCode();
} }
@Override
public byte[] getRow() {
return Bytes.copy(this.rowArray, this.roffset, this.rlength);
}
} }
@InterfaceAudience.Private @InterfaceAudience.Private
@@ -1488,16 +1458,6 @@ public final class CellUtil {
public int getQualifierLength() { public int getQualifierLength() {
return this.qlength; return this.qlength;
} }
@Override
public byte[] getFamily() {
return Bytes.copy(this.fArray, this.foffset, this.flength);
}
@Override
public byte[] getQualifier() {
return Bytes.copy(this.qArray, this.qoffset, this.qlength);
}
} }
@InterfaceAudience.Private @InterfaceAudience.Private
@@ -1553,11 +1513,6 @@ public final class CellUtil {
public byte getTypeByte() { public byte getTypeByte() {
return Type.Minimum.getCode(); return Type.Minimum.getCode();
} }
@Override
public byte[] getRow() {
return Bytes.copy(this.rowArray, this.roffset, this.rlength);
}
} }
@InterfaceAudience.Private @InterfaceAudience.Private
@@ -1609,15 +1564,5 @@ public final class CellUtil {
public int getQualifierLength() { public int getQualifierLength() {
return this.qlength; return this.qlength;
} }
@Override
public byte[] getFamily() {
return Bytes.copy(this.fArray, this.foffset, this.flength);
}
@Override
public byte[] getQualifier() {
return Bytes.copy(this.qArray, this.qoffset, this.qlength);
}
} }
} }

View File

@@ -50,12 +50,13 @@ import com.google.common.annotations.VisibleForTesting;
/** /**
* An HBase Key/Value. This is the fundamental HBase Type. * An HBase Key/Value. This is the fundamental HBase Type.
* <p> * <p>
* HBase applications and users should use the Cell interface and avoid directly using KeyValue * HBase applications and users should use the Cell interface and avoid directly using KeyValue and
* and member functions not defined in Cell. * member functions not defined in Cell.
* <p> * <p>
* If being used client-side, the primary methods to access individual fields are {@link #getRow()}, * If being used client-side, the primary methods to access individual fields are
* {@link #getFamily()}, {@link #getQualifier()}, {@link #getTimestamp()}, and {@link #getValue()}. * {@link #getRowArray()}, {@link #getFamilyArray()}, {@link #getQualifierArray()},
* These methods allocate new byte arrays and return copies. Avoid their use server-side. * {@link #getTimestamp()}, and {@link #getValueArray()}. These methods allocate new byte arrays
* and return copies. Avoid their use server-side.
* <p> * <p>
* Instances of this class are immutable. They do not implement Comparable but Comparators are * Instances of this class are immutable. They do not implement Comparable but Comparators are
* provided. Comparators change with context, whether user table or a catalog table comparison. Its * provided. Comparators change with context, whether user table or a catalog table comparison. Its
@@ -64,23 +65,20 @@ import com.google.common.annotations.VisibleForTesting;
* <p> * <p>
* KeyValue wraps a byte array and takes offsets and lengths into passed array at where to start * KeyValue wraps a byte array and takes offsets and lengths into passed array at where to start
* interpreting the content as KeyValue. The KeyValue format inside a byte array is: * interpreting the content as KeyValue. The KeyValue format inside a byte array is:
* <code>&lt;keylength&gt; &lt;valuelength&gt; &lt;key&gt; &lt;value&gt;</code> * <code>&lt;keylength&gt; &lt;valuelength&gt; &lt;key&gt; &lt;value&gt;</code> Key is further
* Key is further decomposed as: * decomposed as: <code>&lt;rowlength&gt; &lt;row&gt; &lt;columnfamilylength&gt;
* <code>&lt;rowlength&gt; &lt;row&gt; &lt;columnfamilylength&gt;
* &lt;columnfamily&gt; &lt;columnqualifier&gt; * &lt;columnfamily&gt; &lt;columnqualifier&gt;
* &lt;timestamp&gt; &lt;keytype&gt;</code> * &lt;timestamp&gt; &lt;keytype&gt;</code> The <code>rowlength</code> maximum is
* The <code>rowlength</code> maximum is <code>Short.MAX_SIZE</code>, column family length maximum * <code>Short.MAX_SIZE</code>, column family length maximum is <code>Byte.MAX_SIZE</code>, and
* is <code>Byte.MAX_SIZE</code>, and column qualifier + key length must be &lt; * column qualifier + key length must be &lt; <code>Integer.MAX_SIZE</code>. The column does not
* <code>Integer.MAX_SIZE</code>. The column does not contain the family/qualifier delimiter, * contain the family/qualifier delimiter, {@link #COLUMN_FAMILY_DELIMITER}<br>
* {@link #COLUMN_FAMILY_DELIMITER}<br>
* KeyValue can optionally contain Tags. When it contains tags, it is added in the byte array after * KeyValue can optionally contain Tags. When it contains tags, it is added in the byte array after
* the value part. The format for this part is: <code>&lt;tagslength&gt;&lt;tagsbytes&gt;</code>. * the value part. The format for this part is: <code>&lt;tagslength&gt;&lt;tagsbytes&gt;</code>.
* <code>tagslength</code> maximum is <code>Short.MAX_SIZE</code>. The <code>tagsbytes</code> * <code>tagslength</code> maximum is <code>Short.MAX_SIZE</code>. The <code>tagsbytes</code>
* contain one or more tags where as each tag is of the form * contain one or more tags where as each tag is of the form
* <code>&lt;taglength&gt;&lt;tagtype&gt;&lt;tagbytes&gt;</code>. * <code>&lt;taglength&gt;&lt;tagtype&gt;&lt;tagbytes&gt;</code>. <code>tagtype</code> is one byte
* <code>tagtype</code> is one byte and * and <code>taglength</code> maximum is <code>Short.MAX_SIZE</code> and it includes 1 byte type
* <code>taglength</code> maximum is <code>Short.MAX_SIZE</code> and it includes 1 byte type length * length and actual tag bytes length.
* and actual tag bytes length.
*/ */
@InterfaceAudience.Private @InterfaceAudience.Private
public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
@@ -296,12 +294,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
/** Here be dragons **/ /** Here be dragons **/
// used to achieve atomic operations in the memstore.
@Override
public long getMvccVersion() {
return this.getSequenceId();
}
/** /**
* used to achieve atomic operations in the memstore. * used to achieve atomic operations in the memstore.
*/ */
@@ -1172,9 +1164,11 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
*/ */
public Map<String, Object> toStringMap() { public Map<String, Object> toStringMap() {
Map<String, Object> stringMap = new HashMap<String, Object>(); Map<String, Object> stringMap = new HashMap<String, Object>();
stringMap.put("row", Bytes.toStringBinary(getRow())); stringMap.put("row", Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength()));
stringMap.put("family", Bytes.toStringBinary(getFamily())); stringMap.put("family",
stringMap.put("qualifier", Bytes.toStringBinary(getQualifier())); Bytes.toStringBinary(getFamilyArray(), getFamilyOffset(), getFamilyLength()));
stringMap.put("qualifier",
Bytes.toStringBinary(getQualifierArray(), getQualifierOffset(), getQualifierLength()));
stringMap.put("timestamp", getTimestamp()); stringMap.put("timestamp", getTimestamp());
stringMap.put("vlen", getValueLength()); stringMap.put("vlen", getValueLength());
List<Tag> tags = getTags(); List<Tag> tags = getTags();
@@ -1472,10 +1466,9 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
//--------------------------------------------------------------------------- //---------------------------------------------------------------------------
/** /**
* Do not use unless you have to. Used internally for compacting and testing. * Do not use unless you have to. Used internally for compacting and testing. Use
* * {@link #getRowArray()}, {@link #getFamilyArray()}, {@link #getQualifierArray()}, and
* Use {@link #getRow()}, {@link #getFamily()}, {@link #getQualifier()}, and * {@link #getValueArray()} if accessing a KeyValue client-side.
* {@link #getValue()} if accessing a KeyValue client-side.
* @return Copy of the key portion only. * @return Copy of the key portion only.
*/ */
public byte [] getKey() { public byte [] getKey() {
@@ -1485,33 +1478,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
return key; return key;
} }
/**
* Returns value in a new byte array.
* Primarily for use client-side. If server-side, use
* {@link #getBuffer()} with appropriate offsets and lengths instead to
* save on allocations.
* @return Value in a new byte array.
*/
@Override
@Deprecated // use CellUtil.getValueArray()
public byte [] getValue() {
return CellUtil.cloneValue(this);
}
/**
* Primarily for use client-side. Returns the row of this KeyValue in a new
* byte array.<p>
*
* If server-side, use {@link #getBuffer()} with appropriate offsets and
* lengths instead.
* @return Row in a new byte array.
*/
@Override
@Deprecated // use CellUtil.getRowArray()
public byte [] getRow() {
return CellUtil.cloneRow(this);
}
/** /**
* *
* @return Timestamp * @return Timestamp
@@ -1556,35 +1522,6 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
return KeyValue.isDelete(getType()); return KeyValue.isDelete(getType());
} }
/**
* Primarily for use client-side. Returns the family of this KeyValue in a
* new byte array.<p>
*
* If server-side, use {@link #getBuffer()} with appropriate offsets and
* lengths instead.
* @return Returns family. Makes a copy.
*/
@Override
@Deprecated // use CellUtil.getFamilyArray
public byte [] getFamily() {
return CellUtil.cloneFamily(this);
}
/**
* Primarily for use client-side. Returns the column qualifier of this
* KeyValue in a new byte array.<p>
*
* If server-side, use {@link #getBuffer()} with appropriate offsets and
* lengths instead.
* Use {@link #getBuffer()} with appropriate offsets and lengths instead.
* @return Returns qualifier. Makes a copy.
*/
@Override
@Deprecated // use CellUtil.getQualifierArray
public byte [] getQualifier() {
return CellUtil.cloneQualifier(this);
}
/** /**
* This returns the offset where the tag actually starts. * This returns the offset where the tag actually starts.
*/ */

View File

@@ -73,9 +73,9 @@ public class KeyValueTestUtil {
/** /**
* Checks whether KeyValues from kvCollection2 are contained in kvCollection1. * Checks whether KeyValues from kvCollection2 are contained in kvCollection1.
* *
* The comparison is made without distinguishing MVCC version of the KeyValues * The comparison is made without distinguishing MVCC version of the KeyValues
* *
* @param kvCollection1 * @param kvCollection1
* @param kvCollection2 * @param kvCollection2
* @return true if KeyValues from kvCollection2 are contained in kvCollection1 * @return true if KeyValues from kvCollection2 are contained in kvCollection1
@@ -91,7 +91,7 @@ public class KeyValueTestUtil {
} }
return true; return true;
} }
public static List<KeyValue> rewindThenToList(final ByteBuffer bb, public static List<KeyValue> rewindThenToList(final ByteBuffer bb,
final boolean includesMemstoreTS, final boolean useTags) { final boolean includesMemstoreTS, final boolean useTags) {
bb.rewind(); bb.rewind();
@@ -161,15 +161,16 @@
} }
protected static String getRowString(final KeyValue kv) { protected static String getRowString(final KeyValue kv) {
return Bytes.toStringBinary(kv.getRow()); return Bytes.toStringBinary(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength());
} }
protected static String getFamilyString(final KeyValue kv) { protected static String getFamilyString(final KeyValue kv) {
return Bytes.toStringBinary(kv.getFamily()); return Bytes.toStringBinary(kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength());
} }
protected static String getQualifierString(final KeyValue kv) { protected static String getQualifierString(final KeyValue kv) {
return Bytes.toStringBinary(kv.getQualifier()); return Bytes.toStringBinary(kv.getQualifierArray(), kv.getQualifierOffset(),
kv.getQualifierLength());
} }
protected static String getTimestampString(final KeyValue kv) { protected static String getTimestampString(final KeyValue kv) {
@@ -177,11 +178,11 @@
} }
protected static String getTypeString(final KeyValue kv) { protected static String getTypeString(final KeyValue kv) {
return KeyValue.Type.codeToType(kv.getType()).toString(); return KeyValue.Type.codeToType(kv.getTypeByte()).toString();
} }
protected static String getValueString(final KeyValue kv) { protected static String getValueString(final KeyValue kv) {
return Bytes.toStringBinary(kv.getValue()); return Bytes.toStringBinary(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength());
} }
} }

View File

@@ -81,7 +81,7 @@ public class KeyValueUtil {
public static int lengthWithMvccVersion(final KeyValue kv, final boolean includeMvccVersion) { public static int lengthWithMvccVersion(final KeyValue kv, final boolean includeMvccVersion) {
int length = kv.getLength(); int length = kv.getLength();
if (includeMvccVersion) { if (includeMvccVersion) {
length += WritableUtils.getVIntSize(kv.getMvccVersion()); length += WritableUtils.getVIntSize(kv.getSequenceId());
} }
return length; return length;
} }
@@ -101,7 +101,7 @@ public class KeyValueUtil {
public static KeyValue copyToNewKeyValue(final Cell cell) { public static KeyValue copyToNewKeyValue(final Cell cell) {
byte[] bytes = copyToNewByteArray(cell); byte[] bytes = copyToNewByteArray(cell);
KeyValue kvCell = new KeyValue(bytes, 0, bytes.length); KeyValue kvCell = new KeyValue(bytes, 0, bytes.length);
kvCell.setSequenceId(cell.getMvccVersion()); kvCell.setSequenceId(cell.getSequenceId());
return kvCell; return kvCell;
} }
@@ -173,9 +173,9 @@ public class KeyValueUtil {
bb.limit(bb.position() + kv.getLength()); bb.limit(bb.position() + kv.getLength());
bb.put(kv.getBuffer(), kv.getOffset(), kv.getLength()); bb.put(kv.getBuffer(), kv.getOffset(), kv.getLength());
if (includeMvccVersion) { if (includeMvccVersion) {
int numMvccVersionBytes = WritableUtils.getVIntSize(kv.getMvccVersion()); int numMvccVersionBytes = WritableUtils.getVIntSize(kv.getSequenceId());
ByteBufferUtils.extendLimit(bb, numMvccVersionBytes); ByteBufferUtils.extendLimit(bb, numMvccVersionBytes);
ByteBufferUtils.writeVLong(bb, kv.getMvccVersion()); ByteBufferUtils.writeVLong(bb, kv.getSequenceId());
} }
} }

View File

@@ -56,7 +56,7 @@ public class CellCodec implements Codec {
// Value // Value
write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()); write(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
// MvccVersion // MvccVersion
this.out.write(Bytes.toBytes(cell.getMvccVersion())); this.out.write(Bytes.toBytes(cell.getSequenceId()));
} }
/** /**

View File

@@ -58,7 +58,7 @@ public class CellCodecWithTags implements Codec {
// Tags // Tags
write(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); write(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
// MvccVersion // MvccVersion
this.out.write(Bytes.toBytes(cell.getMvccVersion())); this.out.write(Bytes.toBytes(cell.getSequenceId()));
} }
/** /**

View File

@@ -24,13 +24,12 @@ import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Streamable;
import org.apache.hadoop.hbase.SettableSequenceId; import org.apache.hadoop.hbase.SettableSequenceId;
import org.apache.hadoop.hbase.Streamable;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.ByteBufferOutputStream; import org.apache.hadoop.hbase.io.ByteBufferOutputStream;
import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.HeapSize;
@@ -256,11 +255,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
return currentKey.getTypeByte(); return currentKey.getTypeByte();
} }
@Override
public long getMvccVersion() {
return memstoreTS;
}
@Override @Override
public long getSequenceId() { public long getSequenceId() {
return memstoreTS; return memstoreTS;
@@ -302,30 +296,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
return tagsLength; return tagsLength;
} }
@Override
@Deprecated
public byte[] getValue() {
throw new UnsupportedOperationException("getValue() not supported");
}
@Override
@Deprecated
public byte[] getFamily() {
throw new UnsupportedOperationException("getFamily() not supported");
}
@Override
@Deprecated
public byte[] getQualifier() {
throw new UnsupportedOperationException("getQualifier() not supported");
}
@Override
@Deprecated
public byte[] getRow() {
throw new UnsupportedOperationException("getRow() not supported");
}
@Override @Override
public String toString() { public String toString() {
return KeyValue.keyToString(this.keyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen=" return KeyValue.keyToString(this.keyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen="
@@ -334,7 +304,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
public Cell shallowCopy() { public Cell shallowCopy() {
return new ClonedSeekerState(currentBuffer, keyBuffer, currentKey.getRowLength(), return new ClonedSeekerState(currentBuffer, keyBuffer, currentKey.getRowLength(),
currentKey.getFamilyOffset(), currentKey.getFamilyLength(), keyLength, currentKey.getFamilyOffset(), currentKey.getFamilyLength(), keyLength,
currentKey.getQualifierOffset(), currentKey.getQualifierLength(), currentKey.getQualifierOffset(), currentKey.getQualifierLength(),
currentKey.getTimestamp(), currentKey.getTypeByte(), valueLength, valueOffset, currentKey.getTimestamp(), currentKey.getTypeByte(), valueLength, valueOffset,
memstoreTS, tagsOffset, tagsLength, tagCompressionContext, tagsBuffer); memstoreTS, tagsOffset, tagsLength, tagCompressionContext, tagsBuffer);
@@ -342,9 +312,9 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
} }
/** /**
* Copies only the key part of the keybuffer by doing a deep copy and passes the * Copies only the key part of the keybuffer by doing a deep copy and passes the
* seeker state members for taking a clone. * seeker state members for taking a clone.
* Note that the value byte[] part is still pointing to the currentBuffer and the * Note that the value byte[] part is still pointing to the currentBuffer and the
* represented by the valueOffset and valueLength * represented by the valueOffset and valueLength
*/ */
// We return this as a Cell to the upper layers of read flow and might try setting a new SeqId // We return this as a Cell to the upper layers of read flow and might try setting a new SeqId
@@ -372,7 +342,7 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
private byte[] cloneTagsBuffer; private byte[] cloneTagsBuffer;
private long seqId; private long seqId;
private TagCompressionContext tagCompressionContext; private TagCompressionContext tagCompressionContext;
protected ClonedSeekerState(ByteBuffer currentBuffer, byte[] keyBuffer, short rowLength, protected ClonedSeekerState(ByteBuffer currentBuffer, byte[] keyBuffer, short rowLength,
int familyOffset, byte familyLength, int keyLength, int qualOffset, int qualLength, int familyOffset, byte familyLength, int keyLength, int qualOffset, int qualLength,
long timeStamp, byte typeByte, int valueLen, int valueOffset, long seqId, long timeStamp, byte typeByte, int valueLen, int valueOffset, long seqId,
@@ -455,12 +425,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
return typeByte; return typeByte;
} }
@Override
@Deprecated
public long getMvccVersion() {
return getSequenceId();
}
@Override @Override
public long getSequenceId() { public long getSequenceId() {
return seqId; return seqId;
@@ -502,30 +466,6 @@ abstract class BufferedDataBlockEncoder implements DataBlockEncoder {
return tagsLength; return tagsLength;
} }
@Override
@Deprecated
public byte[] getValue() {
return CellUtil.cloneValue(this);
}
@Override
@Deprecated
public byte[] getFamily() {
return CellUtil.cloneFamily(this);
}
@Override
@Deprecated
public byte[] getQualifier() {
return CellUtil.cloneQualifier(this);
}
@Override
@Deprecated
public byte[] getRow() {
return CellUtil.cloneRow(this);
}
@Override @Override
public String toString() { public String toString() {
return KeyValue.keyToString(this.keyOnlyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen=" return KeyValue.keyToString(this.keyOnlyBuffer, 0, KeyValueUtil.keyLength(this)) + "/vlen="

View File

@@ -301,7 +301,7 @@ public class RedundantKVGenerator {
for (KeyValue kv : keyValues) { for (KeyValue kv : keyValues) {
totalSize += kv.getLength(); totalSize += kv.getLength();
if (includesMemstoreTS) { if (includesMemstoreTS) {
totalSize += WritableUtils.getVIntSize(kv.getMvccVersion()); totalSize += WritableUtils.getVIntSize(kv.getSequenceId());
} }
} }
@@ -309,7 +309,7 @@ public class RedundantKVGenerator {
for (KeyValue kv : keyValues) { for (KeyValue kv : keyValues) {
result.put(kv.getBuffer(), kv.getOffset(), kv.getLength()); result.put(kv.getBuffer(), kv.getOffset(), kv.getLength());
if (includesMemstoreTS) { if (includesMemstoreTS) {
ByteBufferUtils.writeVLong(result, kv.getMvccVersion()); ByteBufferUtils.writeVLong(result, kv.getSequenceId());
} }
} }
return result; return result;

View File

@@ -18,7 +18,9 @@
package org.apache.hadoop.hbase; package org.apache.hadoop.hbase;
import static org.junit.Assert.*; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException; import java.io.IOException;
import java.math.BigDecimal; import java.math.BigDecimal;
@@ -154,12 +156,6 @@ public class TestCellUtil {
return 0; return 0;
} }
@Override
public long getMvccVersion() {
// TODO Auto-generated method stub
return 0;
}
@Override @Override
public byte[] getValueArray() { public byte[] getValueArray() {
// TODO Auto-generated method stub // TODO Auto-generated method stub
@@ -190,30 +186,6 @@ public class TestCellUtil {
return 0; return 0;
} }
@Override
public byte[] getValue() {
// TODO Auto-generated method stub
return null;
}
@Override
public byte[] getFamily() {
// TODO Auto-generated method stub
return null;
}
@Override
public byte[] getQualifier() {
// TODO Auto-generated method stub
return null;
}
@Override
public byte[] getRow() {
// TODO Auto-generated method stub
return null;
}
@Override @Override
public long getSequenceId() { public long getSequenceId() {
// TODO Auto-generated method stub // TODO Auto-generated method stub
@@ -592,11 +564,6 @@ public class TestCellUtil {
return KeyValue.Type.Put.getCode(); return KeyValue.Type.Put.getCode();
} }
@Override
public long getMvccVersion() {
return 0;
}
@Override @Override
public long getSequenceId() { public long getSequenceId() {
return 0; return 0;
@@ -639,26 +606,6 @@ public class TestCellUtil {
return tagsLen; return tagsLen;
} }
@Override
public byte[] getValue() {
throw new UnsupportedOperationException();
}
@Override
public byte[] getFamily() {
throw new UnsupportedOperationException();
}
@Override
public byte[] getQualifier() {
throw new UnsupportedOperationException();
}
@Override
public byte[] getRow() {
throw new UnsupportedOperationException();
}
@Override @Override
public ByteBuffer getRowByteBuffer() { public ByteBuffer getRowByteBuffer() {
return this.buffer; return this.buffer;

View File

@@ -93,7 +93,8 @@ public class TestKeyValue extends TestCase {
private void check(final byte [] row, final byte [] family, byte [] qualifier, private void check(final byte [] row, final byte [] family, byte [] qualifier,
final long timestamp, final byte [] value) { final long timestamp, final byte [] value) {
KeyValue kv = new KeyValue(row, family, qualifier, timestamp, value); KeyValue kv = new KeyValue(row, family, qualifier, timestamp, value);
assertTrue(Bytes.compareTo(kv.getRow(), row) == 0); assertTrue(Bytes.compareTo(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), row, 0,
row.length) == 0);
assertTrue(CellUtil.matchingColumn(kv, family, qualifier)); assertTrue(CellUtil.matchingColumn(kv, family, qualifier));
// Call toString to make sure it works. // Call toString to make sure it works.
LOG.info(kv.toString()); LOG.info(kv.toString());
@@ -390,9 +391,10 @@ public class TestKeyValue extends TestCase {
// keys are still the same // keys are still the same
assertTrue(kv1.equals(kv1ko)); assertTrue(kv1.equals(kv1ko));
// but values are not // but values are not
assertTrue(kv1ko.getValue().length == (useLen?Bytes.SIZEOF_INT:0)); assertTrue(kv1ko.getValueLength() == (useLen?Bytes.SIZEOF_INT:0));
if (useLen) { if (useLen) {
assertEquals(kv1.getValueLength(), Bytes.toInt(kv1ko.getValue())); assertEquals(kv1.getValueLength(),
Bytes.toInt(kv1ko.getValueArray(), kv1ko.getValueOffset(), kv1ko.getValueLength()));
} }
} }
} }
@@ -442,10 +444,14 @@ public class TestKeyValue extends TestCase {
KeyValue kv = new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, new Tag[] { KeyValue kv = new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, new Tag[] {
new Tag((byte) 1, metaValue1), new Tag((byte) 2, metaValue2) }); new Tag((byte) 1, metaValue1), new Tag((byte) 2, metaValue2) });
assertTrue(kv.getTagsLength() > 0); assertTrue(kv.getTagsLength() > 0);
assertTrue(Bytes.equals(kv.getRow(), row)); assertTrue(Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), row, 0,
assertTrue(Bytes.equals(kv.getFamily(), cf)); row.length));
assertTrue(Bytes.equals(kv.getQualifier(), q)); assertTrue(Bytes.equals(kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength(), cf, 0,
assertTrue(Bytes.equals(kv.getValue(), value)); cf.length));
assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
kv.getQualifierLength(), q, 0, q.length));
assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value, 0,
value.length));
List<Tag> tags = kv.getTags(); List<Tag> tags = kv.getTags();
assertNotNull(tags); assertNotNull(tags);
assertEquals(2, tags.size()); assertEquals(2, tags.size());
@@ -492,7 +498,7 @@ public class TestKeyValue extends TestCase {
Bytes.equals(next.getValue(), metaValue2); Bytes.equals(next.getValue(), metaValue2);
assertFalse(tagItr.hasNext()); assertFalse(tagItr.hasNext());
} }
public void testMetaKeyComparator() { public void testMetaKeyComparator() {
CellComparator c = CellComparator.META_COMPARATOR; CellComparator c = CellComparator.META_COMPARATOR;
long now = System.currentTimeMillis(); long now = System.currentTimeMillis();
@ -500,23 +506,23 @@ public class TestKeyValue extends TestCase {
KeyValue a = new KeyValue(Bytes.toBytes("table1"), now); KeyValue a = new KeyValue(Bytes.toBytes("table1"), now);
KeyValue b = new KeyValue(Bytes.toBytes("table2"), now); KeyValue b = new KeyValue(Bytes.toBytes("table2"), now);
assertTrue(c.compare(a, b) < 0); assertTrue(c.compare(a, b) < 0);
a = new KeyValue(Bytes.toBytes("table1,111"), now); a = new KeyValue(Bytes.toBytes("table1,111"), now);
b = new KeyValue(Bytes.toBytes("table2"), now); b = new KeyValue(Bytes.toBytes("table2"), now);
assertTrue(c.compare(a, b) < 0); assertTrue(c.compare(a, b) < 0);
a = new KeyValue(Bytes.toBytes("table1"), now); a = new KeyValue(Bytes.toBytes("table1"), now);
b = new KeyValue(Bytes.toBytes("table2,111"), now); b = new KeyValue(Bytes.toBytes("table2,111"), now);
assertTrue(c.compare(a, b) < 0); assertTrue(c.compare(a, b) < 0);
a = new KeyValue(Bytes.toBytes("table,111"), now); a = new KeyValue(Bytes.toBytes("table,111"), now);
b = new KeyValue(Bytes.toBytes("table,2222"), now); b = new KeyValue(Bytes.toBytes("table,2222"), now);
assertTrue(c.compare(a, b) < 0); assertTrue(c.compare(a, b) < 0);
a = new KeyValue(Bytes.toBytes("table,111,aaaa"), now); a = new KeyValue(Bytes.toBytes("table,111,aaaa"), now);
b = new KeyValue(Bytes.toBytes("table,2222"), now); b = new KeyValue(Bytes.toBytes("table,2222"), now);
assertTrue(c.compare(a, b) < 0); assertTrue(c.compare(a, b) < 0);
a = new KeyValue(Bytes.toBytes("table,111"), now); a = new KeyValue(Bytes.toBytes("table,111"), now);
b = new KeyValue(Bytes.toBytes("table,2222.bbb"), now); b = new KeyValue(Bytes.toBytes("table,2222.bbb"), now);
assertTrue(c.compare(a, b) < 0); assertTrue(c.compare(a, b) < 0);
@ -524,7 +530,7 @@ public class TestKeyValue extends TestCase {
a = new KeyValue(Bytes.toBytes("table,,aaaa"), now); a = new KeyValue(Bytes.toBytes("table,,aaaa"), now);
b = new KeyValue(Bytes.toBytes("table,111,bbb"), now); b = new KeyValue(Bytes.toBytes("table,111,bbb"), now);
assertTrue(c.compare(a, b) < 0); assertTrue(c.compare(a, b) < 0);
a = new KeyValue(Bytes.toBytes("table,111,aaaa"), now); a = new KeyValue(Bytes.toBytes("table,111,aaaa"), now);
b = new KeyValue(Bytes.toBytes("table,111,bbb"), now); b = new KeyValue(Bytes.toBytes("table,111,bbb"), now);
assertTrue(c.compare(a, b) < 0); assertTrue(c.compare(a, b) < 0);
@ -532,7 +538,7 @@ public class TestKeyValue extends TestCase {
a = new KeyValue(Bytes.toBytes("table,111,xxxx"), now); a = new KeyValue(Bytes.toBytes("table,111,xxxx"), now);
b = new KeyValue(Bytes.toBytes("table,111,222,bbb"), now); b = new KeyValue(Bytes.toBytes("table,111,222,bbb"), now);
assertTrue(c.compare(a, b) < 0); assertTrue(c.compare(a, b) < 0);
a = new KeyValue(Bytes.toBytes("table,111,11,xxx"), now); a = new KeyValue(Bytes.toBytes("table,111,11,xxx"), now);
b = new KeyValue(Bytes.toBytes("table,111,222,bbb"), now); b = new KeyValue(Bytes.toBytes("table,111,222,bbb"), now);
assertTrue(c.compare(a, b) < 0); assertTrue(c.compare(a, b) < 0);
@ -590,12 +596,6 @@ public class TestKeyValue extends TestCase {
return this.kv.getTagsOffset(); return this.kv.getTagsOffset();
} }
// used to achieve atomic operations in the memstore.
@Override
public long getMvccVersion() {
return this.kv.getMvccVersion();
}
/** /**
* used to achieve atomic operations in the memstore. * used to achieve atomic operations in the memstore.
*/ */
@ -613,7 +613,7 @@ public class TestKeyValue extends TestCase {
} }
/** /**
* *
* @return Timestamp * @return Timestamp
*/ */
@Override @Override
@ -729,34 +729,6 @@ public class TestKeyValue extends TestCase {
return this.kv.getQualifierLength(); return this.kv.getQualifierLength();
} }
@Override
@Deprecated
public byte[] getValue() {
// TODO Auto-generated method stub
return null;
}
@Override
@Deprecated
public byte[] getFamily() {
// TODO Auto-generated method stub
return null;
}
@Override
@Deprecated
public byte[] getQualifier() {
// TODO Auto-generated method stub
return null;
}
@Override
@Deprecated
public byte[] getRow() {
// TODO Auto-generated method stub
return null;
}
/** /**
* @return the backing array of the entire KeyValue (all KeyValue fields are * @return the backing array of the entire KeyValue (all KeyValue fields are
* in a single array) * in a single array)

View File

@ -40,7 +40,7 @@ public class TestByteRangeWithKVSerialization {
pbr.put((byte) (tagsLen >> 8 & 0xff)); pbr.put((byte) (tagsLen >> 8 & 0xff));
pbr.put((byte) (tagsLen & 0xff)); pbr.put((byte) (tagsLen & 0xff));
pbr.put(kv.getTagsArray(), kv.getTagsOffset(), tagsLen); pbr.put(kv.getTagsArray(), kv.getTagsOffset(), tagsLen);
pbr.putVLong(kv.getMvccVersion()); pbr.putVLong(kv.getSequenceId());
} }
static KeyValue readCell(PositionedByteRange pbr) throws Exception { static KeyValue readCell(PositionedByteRange pbr) throws Exception {
@ -88,7 +88,7 @@ public class TestByteRangeWithKVSerialization {
Assert.assertTrue(Bytes.equals(kv.getTagsArray(), kv.getTagsOffset(), Assert.assertTrue(Bytes.equals(kv.getTagsArray(), kv.getTagsOffset(),
kv.getTagsLength(), kv1.getTagsArray(), kv1.getTagsOffset(), kv.getTagsLength(), kv1.getTagsArray(), kv1.getTagsOffset(),
kv1.getTagsLength())); kv1.getTagsLength()));
Assert.assertEquals(kv1.getMvccVersion(), kv.getMvccVersion()); Assert.assertEquals(kv1.getSequenceId(), kv.getSequenceId());
} }
} }
} }

View File

@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured; import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HRegionLocation;
@ -74,24 +75,24 @@ import org.junit.experimental.categories.Category;
/** /**
* IT test used to verify the deletes with visibility labels. * IT test used to verify the deletes with visibility labels.
* The test creates three tables tablename_0, tablename_1 and tablename_2 and each table * The test creates three tables tablename_0, tablename_1 and tablename_2 and each table
* is associated with a unique pair of labels. * is associated with a unique pair of labels.
* Another common table with the name 'commontable' is created and it has the data combined * Another common table with the name 'commontable' is created and it has the data combined
* from all these 3 tables such that there are 3 versions of every row but the visibility label * from all these 3 tables such that there are 3 versions of every row but the visibility label
* in every row corresponds to the table from which the row originated. * in every row corresponds to the table from which the row originated.
* Then deletes are issued to the common table by selecting the visibility label * Then deletes are issued to the common table by selecting the visibility label
* associated with each of the smaller tables. * associated with each of the smaller tables.
* After the delete is issued with one set of visibility labels we try to scan the common table * After the delete is issued with one set of visibility labels we try to scan the common table
* with each of the visibility pairs defined for the 3 tables. * with each of the visibility pairs defined for the 3 tables.
* So after the first delete is issued, a scan with the first set of visibility labels would * So after the first delete is issued, a scan with the first set of visibility labels would
* return zero result whereas the scan issued with the other two sets of visibility labels * return zero result whereas the scan issued with the other two sets of visibility labels
* should return all the rows corresponding to that set of visibility labels. The above * should return all the rows corresponding to that set of visibility labels. The above
* process of delete and scan is repeated until after the last set of visibility labels are * process of delete and scan is repeated until after the last set of visibility labels are
* used for the deletes the common table should not return any row. * used for the deletes the common table should not return any row.
* *
* To use this * To use this
* ./hbase org.apache.hadoop.hbase.test.IntegrationTestBigLinkedListWithVisibility Loop 1 1 20000 /tmp 1 10000 * ./hbase org.apache.hadoop.hbase.test.IntegrationTestBigLinkedListWithVisibility Loop 1 1 20000 /tmp 1 10000
* or * or
* ./hbase org.apache.hadoop.hbase.IntegrationTestsDriver -r .*IntegrationTestBigLinkedListWithVisibility.* * ./hbase org.apache.hadoop.hbase.IntegrationTestsDriver -r .*IntegrationTestBigLinkedListWithVisibility.*
*/ */
@Category(IntegrationTests.class) @Category(IntegrationTests.class)
@ -211,7 +212,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
for (int j = 0; j < DEFAULT_TABLES_COUNT; j++) { for (int j = 0; j < DEFAULT_TABLES_COUNT; j++) {
Put put = new Put(current[i]); Put put = new Put(current[i]);
put.add(FAMILY_NAME, COLUMN_PREV, prev == null ? NO_KEY : prev[i]); put.add(FAMILY_NAME, COLUMN_PREV, prev == null ? NO_KEY : prev[i]);
if (count >= 0) { if (count >= 0) {
put.add(FAMILY_NAME, COLUMN_COUNT, Bytes.toBytes(count + i)); put.add(FAMILY_NAME, COLUMN_COUNT, Bytes.toBytes(count + i));
} }
@ -331,7 +332,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
@Override @Override
protected void processKV(ImmutableBytesWritable key, Result result, protected void processKV(ImmutableBytesWritable key, Result result,
org.apache.hadoop.mapreduce.Mapper.Context context, Put put, org.apache.hadoop.mapreduce.Mapper.Context context, Put put,
org.apache.hadoop.hbase.client.Delete delete) throws org.apache.hadoop.hbase.client.Delete delete) throws
IOException, InterruptedException { IOException, InterruptedException {
String visibilityExps = split[index * 2] + OR + split[(index * 2) + 1]; String visibilityExps = split[index * 2] + OR + split[(index * 2) + 1];
for (Cell kv : result.rawCells()) { for (Cell kv : result.rawCells()) {
@ -343,7 +344,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
delete = new Delete(key.get()); delete = new Delete(key.get());
} }
delete.setCellVisibility(new CellVisibility(visibilityExps)); delete.setCellVisibility(new CellVisibility(visibilityExps));
delete.deleteFamily(kv.getFamily()); delete.deleteFamily(CellUtil.cloneFamily(kv));
} }
if (delete != null) { if (delete != null) {
context.write(key, delete); context.write(key, delete);
@ -356,14 +357,14 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
super.addOptions(); super.addOptions();
addOptWithArg("u", USER_OPT, "User name"); addOptWithArg("u", USER_OPT, "User name");
} }
@Override @Override
protected void processOptions(CommandLine cmd) { protected void processOptions(CommandLine cmd) {
super.processOptions(cmd); super.processOptions(cmd);
if (cmd.hasOption(USER_OPT)) { if (cmd.hasOption(USER_OPT)) {
userName = cmd.getOptionValue(USER_OPT); userName = cmd.getOptionValue(USER_OPT);
} }
} }
@Override @Override
public void setUpCluster() throws Exception { public void setUpCluster() throws Exception {
@ -561,7 +562,7 @@ public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestB
} }
} }
private void verify(int numReducers, long expectedNumNodes, private void verify(int numReducers, long expectedNumNodes,
Path iterationOutput, Verify verify) throws Exception { Path iterationOutput, Verify verify) throws Exception {
verify.setConf(getConf()); verify.setConf(getConf());
int retCode = verify.run(iterationOutput, numReducers); int retCode = verify.run(iterationOutput, numReducers);

View File

@ -102,7 +102,7 @@ public class PrefixTreeCodec implements DataBlockEncoder {
ByteBufferUtils.skip(result, keyValueLength); ByteBufferUtils.skip(result, keyValueLength);
offset += keyValueLength; offset += keyValueLength;
if (includesMvcc) { if (includesMvcc) {
ByteBufferUtils.writeVLong(result, currentCell.getMvccVersion()); ByteBufferUtils.writeVLong(result, currentCell.getSequenceId());
} }
} }
result.position(result.limit());//make it appear as if we were appending result.position(result.limit());//make it appear as if we were appending

View File

@ -314,12 +314,6 @@ public class PrefixTreeSeeker implements EncodedSeeker {
return type; return type;
} }
@Override
@Deprecated
public long getMvccVersion() {
return getSequenceId();
}
@Override @Override
public long getSequenceId() { public long getSequenceId() {
return seqId; return seqId;
@ -355,30 +349,6 @@ public class PrefixTreeSeeker implements EncodedSeeker {
return this.tagsLength; return this.tagsLength;
} }
@Override
@Deprecated
public byte[] getValue() {
return this.val;
}
@Override
@Deprecated
public byte[] getFamily() {
return this.fam;
}
@Override
@Deprecated
public byte[] getQualifier() {
return this.qual;
}
@Override
@Deprecated
public byte[] getRow() {
return this.row;
}
@Override @Override
public String toString() { public String toString() {
String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength()); String row = Bytes.toStringBinary(getRowArray(), getRowOffset(), getRowLength());

View File

@ -131,18 +131,13 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell
} }
@Override @Override
public long getMvccVersion() { public long getSequenceId() {
if (!includeMvccVersion) { if (!includeMvccVersion) {
return 0L; return 0L;
} }
return mvccVersion; return mvccVersion;
} }
@Override
public long getSequenceId() {
return getMvccVersion();
}
@Override @Override
public int getValueLength() { public int getValueLength() {
return valueLength; return valueLength;
@ -208,27 +203,6 @@ public class PrefixTreeCell implements Cell, SettableSequenceId, Comparable<Cell
return type.getCode(); return type.getCode();
} }
/* Deprecated methods pushed into the Cell interface */
@Override
public byte[] getValue() {
return CellUtil.cloneValue(this);
}
@Override
public byte[] getFamily() {
return CellUtil.cloneFamily(this);
}
@Override
public byte[] getQualifier() {
return CellUtil.cloneQualifier(this);
}
@Override
public byte[] getRow() {
return CellUtil.cloneRow(this);
}
/************************* helper methods *************************/ /************************* helper methods *************************/
/** /**

View File

@ -296,9 +296,9 @@ public class PrefixTreeEncoder implements CellOutputStream {
// memstore timestamps // memstore timestamps
if (includeMvccVersion) { if (includeMvccVersion) {
mvccVersions[totalCells] = cell.getMvccVersion(); mvccVersions[totalCells] = cell.getSequenceId();
mvccVersionEncoder.add(cell.getMvccVersion()); mvccVersionEncoder.add(cell.getSequenceId());
totalUnencodedBytes += WritableUtils.getVIntSize(cell.getMvccVersion()); totalUnencodedBytes += WritableUtils.getVIntSize(cell.getSequenceId());
}else{ }else{
//must overwrite in case there was a previous version in this array slot //must overwrite in case there was a previous version in this array slot
mvccVersions[totalCells] = 0L; mvccVersions[totalCells] = 0L;

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.codec.prefixtree.column.data;
import java.util.List; import java.util.List;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.codec.prefixtree.column.TestColumnData; import org.apache.hadoop.hbase.codec.prefixtree.column.TestColumnData;
import org.apache.hadoop.hbase.util.ByteRange; import org.apache.hadoop.hbase.util.ByteRange;
@ -40,7 +41,7 @@ public class TestColumnDataRandom implements TestColumnData {
ByteRangeSet sortedColumns = new ByteRangeTreeSet(); ByteRangeSet sortedColumns = new ByteRangeTreeSet();
List<KeyValue> d = generator.generateTestKeyValues(numColumns); List<KeyValue> d = generator.generateTestKeyValues(numColumns);
for (KeyValue col : d) { for (KeyValue col : d) {
ByteRange colRange = new SimpleMutableByteRange(col.getQualifier()); ByteRange colRange = new SimpleMutableByteRange(CellUtil.cloneQualifier(col));
inputs.add(colRange); inputs.add(colRange);
sortedColumns.add(colRange); sortedColumns.add(colRange);
} }

View File

@ -181,7 +181,7 @@ public class TestRowEncoder {
// assert keys are equal (doesn't compare values) // assert keys are equal (doesn't compare values)
Assert.assertEquals(expected, actual); Assert.assertEquals(expected, actual);
if (includeMemstoreTS) { if (includeMemstoreTS) {
Assert.assertEquals(expected.getMvccVersion(), actual.getMvccVersion()); Assert.assertEquals(expected.getSequenceId(), actual.getSequenceId());
} }
// assert values equal // assert values equal
Assert.assertTrue(Bytes.equals(expected.getValueArray(), expected.getValueOffset(), Assert.assertTrue(Bytes.equals(expected.getValueArray(), expected.getValueOffset(),

View File

@ -115,7 +115,8 @@ public class RemoteHTable implements Table {
if (o instanceof byte[]) { if (o instanceof byte[]) {
sb.append(Bytes.toStringBinary((byte[])o)); sb.append(Bytes.toStringBinary((byte[])o));
} else if (o instanceof KeyValue) { } else if (o instanceof KeyValue) {
sb.append(Bytes.toStringBinary(((KeyValue)o).getQualifier())); sb.append(Bytes.toStringBinary(((KeyValue) o).getRowArray(),
((KeyValue) o).getRowOffset(), ((KeyValue) o).getRowLength()));
} else { } else {
throw new RuntimeException("object type not handled"); throw new RuntimeException("object type not handled");
} }

View File

@ -19,6 +19,10 @@
package org.apache.hadoop.hbase.rest; package org.apache.hadoop.hbase.rest;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.io.StringWriter; import java.io.StringWriter;
import java.util.ArrayList; import java.util.ArrayList;
@ -32,16 +36,24 @@ import javax.xml.bind.Unmarshaller;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryComparator; import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList; import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter; import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.filter.InclusiveStopFilter; import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
import org.apache.hadoop.hbase.filter.PageFilter; import org.apache.hadoop.hbase.filter.PageFilter;
@ -52,8 +64,6 @@ import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SkipFilter; import org.apache.hadoop.hbase.filter.SkipFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator; import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter; import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.rest.client.Client; import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster; import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.Response; import org.apache.hadoop.hbase.rest.client.Response;
@ -64,9 +74,6 @@ import org.apache.hadoop.hbase.rest.model.ScannerModel;
import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RestTests; import org.apache.hadoop.hbase.testclassification.RestTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import static org.junit.Assert.*;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
@ -108,7 +115,7 @@ public class TestScannersWithFilters {
}; };
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final HBaseRESTTestingUtility REST_TEST_UTIL = private static final HBaseRESTTestingUtility REST_TEST_UTIL =
new HBaseRESTTestingUtility(); new HBaseRESTTestingUtility();
private static Client client; private static Client client;
private static JAXBContext context; private static JAXBContext context;
@ -128,7 +135,7 @@ public class TestScannersWithFilters {
ScannerModel.class); ScannerModel.class);
marshaller = context.createMarshaller(); marshaller = context.createMarshaller();
unmarshaller = context.createUnmarshaller(); unmarshaller = context.createUnmarshaller();
client = new Client(new Cluster().add("localhost", client = new Client(new Cluster().add("localhost",
REST_TEST_UTIL.getServletPort())); REST_TEST_UTIL.getServletPort()));
Admin admin = TEST_UTIL.getHBaseAdmin(); Admin admin = TEST_UTIL.getHBaseAdmin();
if (!admin.tableExists(TABLE)) { if (!admin.tableExists(TABLE)) {
@ -154,7 +161,7 @@ public class TestScannersWithFilters {
} }
table.put(p); table.put(p);
} }
// Insert second half (reverse families) // Insert second half (reverse families)
for(byte [] ROW : ROWS_ONE) { for(byte [] ROW : ROWS_ONE) {
Put p = new Put(ROW); Put p = new Put(ROW);
@ -172,14 +179,14 @@ public class TestScannersWithFilters {
} }
table.put(p); table.put(p);
} }
// Delete the second qualifier from all rows and families // Delete the second qualifier from all rows and families
for(byte [] ROW : ROWS_ONE) { for(byte [] ROW : ROWS_ONE) {
Delete d = new Delete(ROW); Delete d = new Delete(ROW);
d.deleteColumns(FAMILIES[0], QUALIFIERS_ONE[1]); d.deleteColumns(FAMILIES[0], QUALIFIERS_ONE[1]);
d.deleteColumns(FAMILIES[1], QUALIFIERS_ONE[1]); d.deleteColumns(FAMILIES[1], QUALIFIERS_ONE[1]);
table.delete(d); table.delete(d);
} }
for(byte [] ROW : ROWS_TWO) { for(byte [] ROW : ROWS_TWO) {
Delete d = new Delete(ROW); Delete d = new Delete(ROW);
d.deleteColumns(FAMILIES[0], QUALIFIERS_TWO[1]); d.deleteColumns(FAMILIES[0], QUALIFIERS_TWO[1]);
@ -187,7 +194,7 @@ public class TestScannersWithFilters {
table.delete(d); table.delete(d);
} }
colsPerRow -= 2; colsPerRow -= 2;
// Delete the second rows from both groups, one column at a time // Delete the second rows from both groups, one column at a time
for(byte [] QUALIFIER : QUALIFIERS_ONE) { for(byte [] QUALIFIER : QUALIFIERS_ONE) {
Delete d = new Delete(ROWS_ONE[1]); Delete d = new Delete(ROWS_ONE[1]);
@ -212,7 +219,7 @@ public class TestScannersWithFilters {
TEST_UTIL.shutdownMiniCluster(); TEST_UTIL.shutdownMiniCluster();
} }
private static void verifyScan(Scan s, long expectedRows, long expectedKeys) private static void verifyScan(Scan s, long expectedRows, long expectedKeys)
throws Exception { throws Exception {
ScannerModel model = ScannerModel.fromScan(s); ScannerModel model = ScannerModel.fromScan(s);
model.setBatch(Integer.MAX_VALUE); // fetch it all at once model.setBatch(Integer.MAX_VALUE); // fetch it all at once
@ -234,7 +241,7 @@ public class TestScannersWithFilters {
unmarshaller.unmarshal(new ByteArrayInputStream(response.getBody())); unmarshaller.unmarshal(new ByteArrayInputStream(response.getBody()));
int rows = cells.getRows().size(); int rows = cells.getRows().size();
assertTrue("Scanned too many rows! Only expected " + expectedRows + assertTrue("Scanned too many rows! Only expected " + expectedRows +
" total but scanned " + rows, expectedRows == rows); " total but scanned " + rows, expectedRows == rows);
for (RowModel row: cells.getRows()) { for (RowModel row: cells.getRows()) {
int count = row.getCells().size(); int count = row.getCells().size();
@ -247,7 +254,7 @@ public class TestScannersWithFilters {
assertEquals(response.getCode(), 200); assertEquals(response.getCode(), 200);
} }
private static void verifyScanFull(Scan s, KeyValue [] kvs) private static void verifyScanFull(Scan s, KeyValue [] kvs)
throws Exception { throws Exception {
ScannerModel model = ScannerModel.fromScan(s); ScannerModel model = ScannerModel.fromScan(s);
model.setBatch(Integer.MAX_VALUE); // fetch it all at once model.setBatch(Integer.MAX_VALUE); // fetch it all at once
@ -281,19 +288,19 @@ public class TestScannersWithFilters {
RowModel rowModel = i.next(); RowModel rowModel = i.next();
List<CellModel> cells = rowModel.getCells(); List<CellModel> cells = rowModel.getCells();
if (cells.isEmpty()) break; if (cells.isEmpty()) break;
assertTrue("Scanned too many keys! Only expected " + kvs.length + assertTrue("Scanned too many keys! Only expected " + kvs.length +
" total but already scanned " + (cells.size() + idx), " total but already scanned " + (cells.size() + idx),
kvs.length >= idx + cells.size()); kvs.length >= idx + cells.size());
for (CellModel cell: cells) { for (CellModel cell: cells) {
assertTrue("Row mismatch", assertTrue("Row mismatch",
Bytes.equals(rowModel.getKey(), kvs[idx].getRow())); Bytes.equals(rowModel.getKey(), CellUtil.cloneRow(kvs[idx])));
byte[][] split = KeyValue.parseColumn(cell.getColumn()); byte[][] split = KeyValue.parseColumn(cell.getColumn());
assertTrue("Family mismatch", assertTrue("Family mismatch",
Bytes.equals(split[0], kvs[idx].getFamily())); Bytes.equals(split[0], CellUtil.cloneFamily(kvs[idx])));
assertTrue("Qualifier mismatch", assertTrue("Qualifier mismatch",
Bytes.equals(split[1], kvs[idx].getQualifier())); Bytes.equals(split[1], CellUtil.cloneQualifier(kvs[idx])));
assertTrue("Value mismatch", assertTrue("Value mismatch",
Bytes.equals(cell.getValue(), kvs[idx].getValue())); Bytes.equals(cell.getValue(), CellUtil.cloneValue(kvs[idx])));
idx++; idx++;
} }
} }
@ -309,7 +316,7 @@ public class TestScannersWithFilters {
marshaller.marshal(model, writer); marshaller.marshal(model, writer);
LOG.debug(writer.toString()); LOG.debug(writer.toString());
byte[] body = Bytes.toBytes(writer.toString()); byte[] body = Bytes.toBytes(writer.toString());
Response response = client.put("/" + TABLE + "/scanner", Response response = client.put("/" + TABLE + "/scanner",
Constants.MIMETYPE_XML, body); Constants.MIMETYPE_XML, body);
assertEquals(response.getCode(), 201); assertEquals(response.getCode(), 201);
String scannerURI = response.getLocation(); String scannerURI = response.getLocation();
@ -334,7 +341,7 @@ public class TestScannersWithFilters {
RowModel rowModel = i.next(); RowModel rowModel = i.next();
List<CellModel> cells = rowModel.getCells(); List<CellModel> cells = rowModel.getCells();
if (cells.isEmpty()) break; if (cells.isEmpty()) break;
assertTrue("Scanned too many rows! Only expected " + expectedRows + assertTrue("Scanned too many rows! Only expected " + expectedRows +
" total but already scanned " + (j+1), expectedRows > j); " total but already scanned " + (j+1), expectedRows > j);
assertEquals("Expected " + expectedKeys + " keys per row but " + assertEquals("Expected " + expectedKeys + " keys per row but " +
"returned " + cells.size(), expectedKeys, cells.size()); "returned " + cells.size(), expectedKeys, cells.size());
@ -348,7 +355,7 @@ public class TestScannersWithFilters {
// No filter // No filter
long expectedRows = numRows; long expectedRows = numRows;
long expectedKeys = colsPerRow; long expectedKeys = colsPerRow;
// Both families // Both families
Scan s = new Scan(); Scan s = new Scan();
verifyScan(s, expectedRows, expectedKeys); verifyScan(s, expectedRows, expectedKeys);
@ -416,7 +423,7 @@ public class TestScannersWithFilters {
new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]), new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]) new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1])
}; };
// Grab all 6 rows // Grab all 6 rows
long expectedRows = 6; long expectedRows = 6;
long expectedKeys = colsPerRow; long expectedKeys = colsPerRow;
@ -425,7 +432,7 @@ public class TestScannersWithFilters {
verifyScan(s, expectedRows, expectedKeys); verifyScan(s, expectedRows, expectedKeys);
s.setFilter(new PageFilter(expectedRows)); s.setFilter(new PageFilter(expectedRows));
verifyScanFull(s, expectedKVs); verifyScanFull(s, expectedKVs);
// Grab first 4 rows (6 cols per row) // Grab first 4 rows (6 cols per row)
expectedRows = 4; expectedRows = 4;
expectedKeys = colsPerRow; expectedKeys = colsPerRow;
@ -434,7 +441,7 @@ public class TestScannersWithFilters {
verifyScan(s, expectedRows, expectedKeys); verifyScan(s, expectedRows, expectedKeys);
s.setFilter(new PageFilter(expectedRows)); s.setFilter(new PageFilter(expectedRows));
verifyScanFull(s, Arrays.copyOf(expectedKVs, 24)); verifyScanFull(s, Arrays.copyOf(expectedKVs, 24));
// Grab first 2 rows // Grab first 2 rows
expectedRows = 2; expectedRows = 2;
expectedKeys = colsPerRow; expectedKeys = colsPerRow;
@ -451,20 +458,20 @@ public class TestScannersWithFilters {
s.setFilter(new PageFilter(expectedRows)); s.setFilter(new PageFilter(expectedRows));
verifyScan(s, expectedRows, expectedKeys); verifyScan(s, expectedRows, expectedKeys);
s.setFilter(new PageFilter(expectedRows)); s.setFilter(new PageFilter(expectedRows));
verifyScanFull(s, Arrays.copyOf(expectedKVs, 6)); verifyScanFull(s, Arrays.copyOf(expectedKVs, 6));
} }
@Test @Test
public void testInclusiveStopFilter() throws Exception { public void testInclusiveStopFilter() throws Exception {
// Grab rows from group one // Grab rows from group one
// If we just use start/stop row, we get total/2 - 1 rows // If we just use start/stop row, we get total/2 - 1 rows
long expectedRows = (numRows / 2) - 1; long expectedRows = (numRows / 2) - 1;
long expectedKeys = colsPerRow; long expectedKeys = colsPerRow;
Scan s = new Scan(Bytes.toBytes("testRowOne-0"), Scan s = new Scan(Bytes.toBytes("testRowOne-0"),
Bytes.toBytes("testRowOne-3")); Bytes.toBytes("testRowOne-3"));
verifyScan(s, expectedRows, expectedKeys); verifyScan(s, expectedRows, expectedKeys);
// Now use start row with inclusive stop filter // Now use start row with inclusive stop filter
expectedRows = numRows / 2; expectedRows = numRows / 2;
s = new Scan(Bytes.toBytes("testRowOne-0")); s = new Scan(Bytes.toBytes("testRowOne-0"));
@ -472,14 +479,14 @@ public class TestScannersWithFilters {
verifyScan(s, expectedRows, expectedKeys); verifyScan(s, expectedRows, expectedKeys);
// Grab rows from group two // Grab rows from group two
// If we just use start/stop row, we get total/2 - 1 rows // If we just use start/stop row, we get total/2 - 1 rows
expectedRows = (numRows / 2) - 1; expectedRows = (numRows / 2) - 1;
expectedKeys = colsPerRow; expectedKeys = colsPerRow;
s = new Scan(Bytes.toBytes("testRowTwo-0"), s = new Scan(Bytes.toBytes("testRowTwo-0"),
Bytes.toBytes("testRowTwo-3")); Bytes.toBytes("testRowTwo-3"));
verifyScan(s, expectedRows, expectedKeys); verifyScan(s, expectedRows, expectedKeys);
// Now use start row with inclusive stop filter // Now use start row with inclusive stop filter
expectedRows = numRows / 2; expectedRows = numRows / 2;
s = new Scan(Bytes.toBytes("testRowTwo-0")); s = new Scan(Bytes.toBytes("testRowTwo-0"));
@ -497,7 +504,7 @@ public class TestScannersWithFilters {
Scan s = new Scan(); Scan s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match keys less than same qualifier // Match keys less than same qualifier
// Expect only two keys (one from each family) in half the rows // Expect only two keys (one from each family) in half the rows
expectedRows = numRows / 2; expectedRows = numRows / 2;
@ -507,7 +514,7 @@ public class TestScannersWithFilters {
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match keys less than or equal // Match keys less than or equal
// Expect four keys (two from each family) in half the rows // Expect four keys (two from each family) in half the rows
expectedRows = numRows / 2; expectedRows = numRows / 2;
@ -517,7 +524,7 @@ public class TestScannersWithFilters {
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match keys not equal // Match keys not equal
// Expect four keys (two from each family) // Expect four keys (two from each family)
// Only look in first group of rows // Only look in first group of rows
@ -528,7 +535,7 @@ public class TestScannersWithFilters {
s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo")); s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match keys greater or equal // Match keys greater or equal
// Expect four keys (two from each family) // Expect four keys (two from each family)
// Only look in first group of rows // Only look in first group of rows
@ -539,7 +546,7 @@ public class TestScannersWithFilters {
s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo")); s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match keys greater // Match keys greater
// Expect two keys (one from each family) // Expect two keys (one from each family)
// Only look in first group of rows // Only look in first group of rows
@ -550,7 +557,7 @@ public class TestScannersWithFilters {
s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo")); s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match keys not equal to // Match keys not equal to
// Look across rows and fully validate the keys and ordering // Look across rows and fully validate the keys and ordering
// Expect varied numbers of keys, 4 per row in group one, 6 per row in // Expect varied numbers of keys, 4 per row in group one, 6 per row in
@ -559,7 +566,7 @@ public class TestScannersWithFilters {
new BinaryComparator(QUALIFIERS_ONE[2])); new BinaryComparator(QUALIFIERS_ONE[2]));
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
KeyValue [] kvs = { KeyValue [] kvs = {
// testRowOne-0 // testRowOne-0
new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
@ -599,7 +606,7 @@ public class TestScannersWithFilters {
new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]), new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
}; };
verifyScanFull(s, kvs); verifyScanFull(s, kvs);
// Test across rows and groups with a regex // Test across rows and groups with a regex
// Filter out "test*-2" // Filter out "test*-2"
// Expect 4 keys per row across both groups // Expect 4 keys per row across both groups
@ -607,7 +614,7 @@ public class TestScannersWithFilters {
new RegexStringComparator("test.+-2")); new RegexStringComparator("test.+-2"));
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
kvs = new KeyValue [] { kvs = new KeyValue [] {
// testRowOne-0 // testRowOne-0
new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
@ -653,7 +660,7 @@ public class TestScannersWithFilters {
Scan s = new Scan(); Scan s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match a two rows, one from each group, using regex // Match a two rows, one from each group, using regex
expectedRows = 2; expectedRows = 2;
expectedKeys = colsPerRow; expectedKeys = colsPerRow;
@ -662,7 +669,7 @@ public class TestScannersWithFilters {
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match rows less than // Match rows less than
// Expect all keys in one row // Expect all keys in one row
expectedRows = 1; expectedRows = 1;
@ -672,7 +679,7 @@ public class TestScannersWithFilters {
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match rows less than or equal // Match rows less than or equal
// Expect all keys in two rows // Expect all keys in two rows
expectedRows = 2; expectedRows = 2;
@ -682,7 +689,7 @@ public class TestScannersWithFilters {
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match rows not equal // Match rows not equal
// Expect all keys in all but one row // Expect all keys in all but one row
expectedRows = numRows - 1; expectedRows = numRows - 1;
@ -692,7 +699,7 @@ public class TestScannersWithFilters {
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match keys greater or equal // Match keys greater or equal
// Expect all keys in all but one row // Expect all keys in all but one row
expectedRows = numRows - 1; expectedRows = numRows - 1;
@ -702,7 +709,7 @@ public class TestScannersWithFilters {
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match keys greater // Match keys greater
// Expect all keys in all but two rows // Expect all keys in all but two rows
expectedRows = numRows - 2; expectedRows = numRows - 2;
@ -712,7 +719,7 @@ public class TestScannersWithFilters {
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match rows not equal to testRowTwo-2 // Match rows not equal to testRowTwo-2
// Look across rows and fully validate the keys and ordering // Look across rows and fully validate the keys and ordering
// Should see all keys in all rows but testRowTwo-2 // Should see all keys in all rows but testRowTwo-2
@ -720,7 +727,7 @@ public class TestScannersWithFilters {
new BinaryComparator(Bytes.toBytes("testRowOne-2"))); new BinaryComparator(Bytes.toBytes("testRowOne-2")));
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
KeyValue [] kvs = { KeyValue [] kvs = {
// testRowOne-0 // testRowOne-0
new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
@ -759,7 +766,7 @@ public class TestScannersWithFilters {
new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]), new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
}; };
verifyScanFull(s, kvs); verifyScanFull(s, kvs);
// Test across rows and groups with a regex // Test across rows and groups with a regex
// Filter out everything that doesn't match "*-2" // Filter out everything that doesn't match "*-2"
// Expect all keys in two rows // Expect all keys in two rows
@ -767,7 +774,7 @@ public class TestScannersWithFilters {
new RegexStringComparator(".+-2")); new RegexStringComparator(".+-2"));
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
kvs = new KeyValue [] { kvs = new KeyValue [] {
// testRowOne-2 // testRowOne-2
new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]), new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
@ -825,7 +832,7 @@ public class TestScannersWithFilters {
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match values less than or equal // Match values less than or equal
// Expect all rows // Expect all rows
expectedRows = numRows; expectedRows = numRows;
@ -845,7 +852,7 @@ public class TestScannersWithFilters {
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match values not equal // Match values not equal
// Expect half the rows // Expect half the rows
expectedRows = numRows / 2; expectedRows = numRows / 2;
@ -855,7 +862,7 @@ public class TestScannersWithFilters {
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match values greater or equal // Match values greater or equal
// Expect all rows // Expect all rows
expectedRows = numRows; expectedRows = numRows;
@ -865,7 +872,7 @@ public class TestScannersWithFilters {
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match values greater // Match values greater
// Expect half rows // Expect half rows
expectedRows = numRows / 2; expectedRows = numRows / 2;
@ -875,7 +882,7 @@ public class TestScannersWithFilters {
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
verifyScanNoEarlyOut(s, expectedRows, expectedKeys); verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
// Match values not equal to testValueOne // Match values not equal to testValueOne
// Look across rows and fully validate the keys and ordering // Look across rows and fully validate the keys and ordering
// Should see all keys in all group two rows // Should see all keys in all group two rows
@ -883,7 +890,7 @@ public class TestScannersWithFilters {
new BinaryComparator(Bytes.toBytes("testValueOne"))); new BinaryComparator(Bytes.toBytes("testValueOne")));
s = new Scan(); s = new Scan();
s.setFilter(f); s.setFilter(f);
KeyValue [] kvs = { KeyValue [] kvs = {
// testRowTwo-0 // testRowTwo-0
new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
@ -918,7 +925,7 @@ public class TestScannersWithFilters {
new BinaryComparator(Bytes.toBytes("testQualifierOne-2")))); new BinaryComparator(Bytes.toBytes("testQualifierOne-2"))));
Scan s = new Scan(); Scan s = new Scan();
s.setFilter(f); s.setFilter(f);
KeyValue [] kvs = { KeyValue [] kvs = {
// testRowTwo-0 // testRowTwo-0
new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]), new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
@ -947,7 +954,7 @@ public class TestScannersWithFilters {
@Test @Test
public void testFilterList() throws Exception { public void testFilterList() throws Exception {
// Test getting a single row, single key using Row, Qualifier, and Value // Test getting a single row, single key using Row, Qualifier, and Value
// regular expression and substring filters // regular expression and substring filters
// Use must pass all // Use must pass all
List<Filter> filters = new ArrayList<Filter>(); List<Filter> filters = new ArrayList<Filter>();

View File

@ -106,12 +106,6 @@ public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimesta
return cell.getTypeByte(); return cell.getTypeByte();
} }
@Override
@Deprecated
public long getMvccVersion() {
return getSequenceId();
}
@Override @Override
public long getSequenceId() { public long getSequenceId() {
return cell.getSequenceId(); return cell.getSequenceId();
@ -151,30 +145,6 @@ public class TagRewriteCell implements Cell, SettableSequenceId, SettableTimesta
return this.tags.length; return this.tags.length;
} }
@Override
@Deprecated
public byte[] getValue() {
return cell.getValue();
}
@Override
@Deprecated
public byte[] getFamily() {
return cell.getFamily();
}
@Override
@Deprecated
public byte[] getQualifier() {
return cell.getQualifier();
}
@Override
@Deprecated
public byte[] getRow() {
return cell.getRow();
}
@Override @Override
public long heapSize() { public long heapSize() {
long sum = CellUtil.estimatedHeapSizeOf(cell) - cell.getTagsLength(); long sum = CellUtil.estimatedHeapSizeOf(cell) - cell.getTagsLength();

View File

@ -17,6 +17,12 @@
*/ */
package org.apache.hadoop.hbase.mapreduce; package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Map;
import java.util.TreeMap;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
@ -48,12 +54,6 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Map;
import java.util.TreeMap;
/** /**
* A tool to replay WAL files as a M/R job. * A tool to replay WAL files as a M/R job.
* The WAL can be replayed for a set of tables or all tables, * The WAL can be replayed for a set of tables or all tables,
@ -106,8 +106,8 @@ public class WALPlayer extends Configured implements Tool {
if (Bytes.equals(table, key.getTablename().getName())) { if (Bytes.equals(table, key.getTablename().getName())) {
for (Cell cell : value.getCells()) { for (Cell cell : value.getCells()) {
KeyValue kv = KeyValueUtil.ensureKeyValue(cell); KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
if (WALEdit.isMetaEditFamily(kv.getFamily())) continue; if (WALEdit.isMetaEditFamily(kv)) continue;
context.write(new ImmutableBytesWritable(kv.getRow()), kv); context.write(new ImmutableBytesWritable(CellUtil.cloneRow(kv)), kv);
} }
} }
} catch (InterruptedException e) { } catch (InterruptedException e) {
@ -149,7 +149,7 @@ public class WALPlayer extends Configured implements Tool {
Cell lastCell = null; Cell lastCell = null;
for (Cell cell : value.getCells()) { for (Cell cell : value.getCells()) {
// filtering WAL meta entries // filtering WAL meta entries
if (WALEdit.isMetaEditFamily(cell.getFamily())) continue; if (WALEdit.isMetaEditFamily(cell)) continue;
// Allow a subclass filter out this cell. // Allow a subclass filter out this cell.
if (filter(context, cell)) { if (filter(context, cell)) {
@ -163,9 +163,9 @@ public class WALPlayer extends Configured implements Tool {
if (put != null) context.write(tableOut, put); if (put != null) context.write(tableOut, put);
if (del != null) context.write(tableOut, del); if (del != null) context.write(tableOut, del);
if (CellUtil.isDelete(cell)) { if (CellUtil.isDelete(cell)) {
del = new Delete(cell.getRow()); del = new Delete(CellUtil.cloneRow(cell));
} else { } else {
put = new Put(cell.getRow()); put = new Put(CellUtil.cloneRow(cell));
} }
} }
if (CellUtil.isDelete(cell)) { if (CellUtil.isDelete(cell)) {

View File

@ -329,7 +329,7 @@ public class ScanQueryMatcher {
* they affect * they affect
*/ */
byte typeByte = cell.getTypeByte(); byte typeByte = cell.getTypeByte();
long mvccVersion = cell.getMvccVersion(); long mvccVersion = cell.getSequenceId();
if (CellUtil.isDelete(cell)) { if (CellUtil.isDelete(cell)) {
if (keepDeletedCells == KeepDeletedCells.FALSE if (keepDeletedCells == KeepDeletedCells.FALSE
|| (keepDeletedCells == KeepDeletedCells.TTL && timestamp < ttl)) { || (keepDeletedCells == KeepDeletedCells.TTL && timestamp < ttl)) {

View File

@ -34,6 +34,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KeyOnlyKeyValue; import org.apache.hadoop.hbase.KeyValue.KeyOnlyKeyValue;
@ -168,7 +169,7 @@ public class StripeStoreFileManager
// Order matters for this call. // Order matters for this call.
result.addSublist(state.level0Files); result.addSublist(state.level0Files);
if (!state.stripeFiles.isEmpty()) { if (!state.stripeFiles.isEmpty()) {
int lastStripeIndex = findStripeForRow(targetKey.getRow(), false); int lastStripeIndex = findStripeForRow(CellUtil.cloneRow(targetKey), false);
for (int stripeIndex = lastStripeIndex; stripeIndex >= 0; --stripeIndex) { for (int stripeIndex = lastStripeIndex; stripeIndex >= 0; --stripeIndex) {
result.addSublist(state.stripeFiles.get(stripeIndex)); result.addSublist(state.stripeFiles.get(stripeIndex));
} }

View File

@ -288,7 +288,7 @@ public class WALEdit implements Writable, HeapSize {
public static FlushDescriptor getFlushDescriptor(Cell cell) throws IOException { public static FlushDescriptor getFlushDescriptor(Cell cell) throws IOException {
if (CellUtil.matchingColumn(cell, METAFAMILY, FLUSH)) { if (CellUtil.matchingColumn(cell, METAFAMILY, FLUSH)) {
return FlushDescriptor.parseFrom(cell.getValue()); return FlushDescriptor.parseFrom(CellUtil.cloneValue(cell));
} }
return null; return null;
} }
@ -302,7 +302,7 @@ public class WALEdit implements Writable, HeapSize {
public static RegionEventDescriptor getRegionEventDescriptor(Cell cell) throws IOException { public static RegionEventDescriptor getRegionEventDescriptor(Cell cell) throws IOException {
if (CellUtil.matchingColumn(cell, METAFAMILY, REGION_EVENT)) { if (CellUtil.matchingColumn(cell, METAFAMILY, REGION_EVENT)) {
return RegionEventDescriptor.parseFrom(cell.getValue()); return RegionEventDescriptor.parseFrom(CellUtil.cloneValue(cell));
} }
return null; return null;
} }
@ -336,7 +336,7 @@ public class WALEdit implements Writable, HeapSize {
*/ */
public static CompactionDescriptor getCompaction(Cell kv) throws IOException { public static CompactionDescriptor getCompaction(Cell kv) throws IOException {
if (CellUtil.matchingColumn(kv, METAFAMILY, COMPACTION)) { if (CellUtil.matchingColumn(kv, METAFAMILY, COMPACTION)) {
return CompactionDescriptor.parseFrom(kv.getValue()); return CompactionDescriptor.parseFrom(CellUtil.cloneValue(kv));
} }
return null; return null;
} }
@ -365,7 +365,7 @@ public class WALEdit implements Writable, HeapSize {
*/ */
public static WALProtos.BulkLoadDescriptor getBulkLoadDescriptor(Cell cell) throws IOException { public static WALProtos.BulkLoadDescriptor getBulkLoadDescriptor(Cell cell) throws IOException {
if (CellUtil.matchingColumn(cell, METAFAMILY, BULK_LOAD)) { if (CellUtil.matchingColumn(cell, METAFAMILY, BULK_LOAD)) {
return WALProtos.BulkLoadDescriptor.parseFrom(cell.getValue()); return WALProtos.BulkLoadDescriptor.parseFrom(CellUtil.cloneValue(cell));
} }
return null; return null;
} }

View File

@ -23,6 +23,7 @@ import java.util.NavigableMap;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.wal.WAL.Entry; import org.apache.hadoop.hbase.wal.WAL.Entry;
@ -44,8 +45,8 @@ public class ScopeWALEntryFilter implements WALEntryFilter {
Cell cell = cells.get(i); Cell cell = cells.get(i);
// The scope will be null or empty if // The scope will be null or empty if
// there's nothing to replicate in that WALEdit // there's nothing to replicate in that WALEdit
if (!scopes.containsKey(cell.getFamily()) byte[] fam = CellUtil.cloneFamily(cell);
|| scopes.get(cell.getFamily()) == HConstants.REPLICATION_SCOPE_LOCAL) { if (!scopes.containsKey(fam) || scopes.get(fam) == HConstants.REPLICATION_SCOPE_LOCAL) {
cells.remove(i); cells.remove(i);
} }
} }

View File

@ -25,9 +25,10 @@ import java.util.Map;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL.Entry;
public class TableCfWALEntryFilter implements WALEntryFilter { public class TableCfWALEntryFilter implements WALEntryFilter {
@ -62,7 +63,7 @@ public class TableCfWALEntryFilter implements WALEntryFilter {
Cell cell = cells.get(i); Cell cell = cells.get(i);
// ignore(remove) kv if its cf isn't in the replicable cf list // ignore(remove) kv if its cf isn't in the replicable cf list
// (empty cfs means all cfs of this table are replicable) // (empty cfs means all cfs of this table are replicable)
if ((cfs != null && !cfs.contains(Bytes.toString(cell.getFamily())))) { if ((cfs != null && !cfs.contains(Bytes.toString(CellUtil.cloneFamily(cell))))) {
cells.remove(i); cells.remove(i);
} }
} }

View File

@ -38,6 +38,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
@ -354,7 +355,7 @@ public class RegionReplicaReplicationEndpoint extends HBaseReplicationEndpoint {
} }
sinkWriter.append(buffer.getTableName(), buffer.getEncodedRegionName(), sinkWriter.append(buffer.getTableName(), buffer.getEncodedRegionName(),
entries.get(0).getEdit().getCells().get(0).getRow(), entries); CellUtil.cloneRow(entries.get(0).getEdit().getCells().get(0)), entries);
} }
@Override @Override

View File

@ -244,7 +244,7 @@ public class Replication extends WALActionsListener.Base implements
new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR); new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
byte[] family; byte[] family;
for (Cell cell : logEdit.getCells()) { for (Cell cell : logEdit.getCells()) {
family = cell.getFamily(); family = CellUtil.cloneFamily(cell);
// This is expected and the KV should not be replicated // This is expected and the KV should not be replicated
if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) continue; if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) continue;
// Unexpected, has a tendency to happen in unit tests // Unexpected, has a tendency to happen in unit tests

View File

@ -329,9 +329,9 @@ public class AccessController extends BaseMasterAndRegionObserver
List<KeyValue> kvList = (List<KeyValue>)family.getValue(); List<KeyValue> kvList = (List<KeyValue>)family.getValue();
for (KeyValue kv : kvList) { for (KeyValue kv : kvList) {
if (!authManager.authorize(user, tableName, family.getKey(), if (!authManager.authorize(user, tableName, family.getKey(),
kv.getQualifier(), permRequest)) { CellUtil.cloneQualifier(kv), permRequest)) {
return AuthResult.deny(request, "Failed qualifier check", user, return AuthResult.deny(request, "Failed qualifier check", user, permRequest,
permRequest, tableName, makeFamilyMap(family.getKey(), kv.getQualifier())); tableName, makeFamilyMap(family.getKey(), CellUtil.cloneQualifier(kv)));
} }
} }
} }
@ -749,7 +749,7 @@ public class AccessController extends BaseMasterAndRegionObserver
} }
} }
} else if (entry.getValue() == null) { } else if (entry.getValue() == null) {
get.addFamily(col); get.addFamily(col);
} else { } else {
throw new RuntimeException("Unhandled collection type " + throw new RuntimeException("Unhandled collection type " +
entry.getValue().getClass().getName()); entry.getValue().getClass().getName());
@ -1308,7 +1308,7 @@ public class AccessController extends BaseMasterAndRegionObserver
@Override @Override
public void preModifyNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx, public void preModifyNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx,
NamespaceDescriptor ns) throws IOException { NamespaceDescriptor ns) throws IOException {
// We require only global permission so that // We require only global permission so that
// a user with NS admin cannot altering namespace configurations. i.e. namespace quota // a user with NS admin cannot altering namespace configurations. i.e. namespace quota
requireGlobalPermission("modifyNamespace", Action.ADMIN, ns.getName()); requireGlobalPermission("modifyNamespace", Action.ADMIN, ns.getName());
} }

View File

@ -393,7 +393,7 @@ public class HBaseFsck extends Configured implements Closeable {
LOG.info("Failed to create lock file " + hbckLockFilePath.getName() LOG.info("Failed to create lock file " + hbckLockFilePath.getName()
+ ", try=" + (retryCounter.getAttemptTimes() + 1) + " of " + ", try=" + (retryCounter.getAttemptTimes() + 1) + " of "
+ retryCounter.getMaxAttempts()); + retryCounter.getMaxAttempts());
LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(), LOG.debug("Failed to create lock file " + hbckLockFilePath.getName(),
ioe); ioe);
try { try {
exception = ioe; exception = ioe;
@ -880,7 +880,7 @@ public class HBaseFsck extends Configured implements Closeable {
hf = HFile.createReader(fs, hfile.getPath(), cacheConf, getConf()); hf = HFile.createReader(fs, hfile.getPath(), cacheConf, getConf());
hf.loadFileInfo(); hf.loadFileInfo();
Cell startKv = hf.getFirstKey(); Cell startKv = hf.getFirstKey();
start = startKv.getRow(); start = CellUtil.cloneRow(startKv);
Cell endKv = hf.getLastKey(); Cell endKv = hf.getLastKey();
end = CellUtil.cloneRow(endKv); end = CellUtil.cloneRow(endKv);
} catch (IOException ioe) { } catch (IOException ioe) {
@ -2685,10 +2685,10 @@ public class HBaseFsck extends Configured implements Closeable {
} }
regionsFromMeta = Ordering.natural().immutableSortedCopy(regions); regionsFromMeta = Ordering.natural().immutableSortedCopy(regions);
} }
return regionsFromMeta; return regionsFromMeta;
} }
private class IntegrityFixSuggester extends TableIntegrityErrorHandlerImpl { private class IntegrityFixSuggester extends TableIntegrityErrorHandlerImpl {
ErrorReporter errors; ErrorReporter errors;

View File

@ -33,8 +33,6 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser; import org.apache.commons.cli.PosixParser;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
@ -43,14 +41,14 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.FSUtils;
import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.map.ObjectMapper;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
// imports for things that haven't moved yet.
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
/** /**
* WALPrettyPrinter prints the contents of a given WAL with a variety of * WALPrettyPrinter prints the contents of a given WAL with a variety of
* options affecting formatting and extent of content. * options affecting formatting and extent of content.
@ -245,7 +243,7 @@ public class WALPrettyPrinter {
} }
WAL.Reader log = WALFactory.createReader(fs, p, conf); WAL.Reader log = WALFactory.createReader(fs, p, conf);
if (log instanceof ProtobufLogReader) { if (log instanceof ProtobufLogReader) {
List<String> writerClsNames = ((ProtobufLogReader) log).getWriterClsNames(); List<String> writerClsNames = ((ProtobufLogReader) log).getWriterClsNames();
if (writerClsNames != null && writerClsNames.size() > 0) { if (writerClsNames != null && writerClsNames.size() > 0) {
@ -258,18 +256,18 @@ public class WALPrettyPrinter {
} }
out.println(); out.println();
} }
String cellCodecClsName = ((ProtobufLogReader) log).getCodecClsName(); String cellCodecClsName = ((ProtobufLogReader) log).getCodecClsName();
if (cellCodecClsName != null) { if (cellCodecClsName != null) {
out.println("Cell Codec Class: " + cellCodecClsName); out.println("Cell Codec Class: " + cellCodecClsName);
} }
} }
if (outputJSON && !persistentOutput) { if (outputJSON && !persistentOutput) {
out.print("["); out.print("[");
firstTxn = true; firstTxn = true;
} }
try { try {
WAL.Entry entry; WAL.Entry entry;
while ((entry = log.next()) != null) { while ((entry = log.next()) != null) {
@ -288,7 +286,7 @@ public class WALPrettyPrinter {
for (Cell cell : edit.getCells()) { for (Cell cell : edit.getCells()) {
// add atomic operation to txn // add atomic operation to txn
Map<String, Object> op = new HashMap<String, Object>(toStringMap(cell)); Map<String, Object> op = new HashMap<String, Object>(toStringMap(cell));
if (outputValues) op.put("value", Bytes.toStringBinary(cell.getValue())); if (outputValues) op.put("value", Bytes.toStringBinary(CellUtil.cloneValue(cell)));
// check row output filter // check row output filter
if (row == null || ((String) op.get("row")).equals(row)) { if (row == null || ((String) op.get("row")).equals(row)) {
actions.add(op); actions.add(op);

View File

@ -3744,11 +3744,11 @@ public class TestFromClientSide {
// KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO // KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO
KeyValue kv = (KeyValue)put.getFamilyCellMap().get(CONTENTS_FAMILY).get(0); KeyValue kv = (KeyValue)put.getFamilyCellMap().get(CONTENTS_FAMILY).get(0);
assertTrue(Bytes.equals(kv.getFamily(), CONTENTS_FAMILY)); assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), CONTENTS_FAMILY));
// will it return null or an empty byte array? // will it return null or an empty byte array?
assertTrue(Bytes.equals(kv.getQualifier(), new byte[0])); assertTrue(Bytes.equals(CellUtil.cloneQualifier(kv), new byte[0]));
assertTrue(Bytes.equals(kv.getValue(), value)); assertTrue(Bytes.equals(CellUtil.cloneValue(kv), value));
table.put(put); table.put(put);
@ -5335,7 +5335,7 @@ public class TestFromClientSide {
assertEquals(1, regionsList.size()); assertEquals(1, regionsList.size());
} }
} }
private List<HRegionLocation> getRegionsInRange(TableName tableName, byte[] startKey, private List<HRegionLocation> getRegionsInRange(TableName tableName, byte[] startKey,
byte[] endKey) throws IOException { byte[] endKey) throws IOException {
List<HRegionLocation> regionsInRange = new ArrayList<HRegionLocation>(); List<HRegionLocation> regionsInRange = new ArrayList<HRegionLocation>();
@ -5778,8 +5778,11 @@ public class TestFromClientSide {
int expectedIndex = 5; int expectedIndex = 5;
for (Result result : scanner) { for (Result result : scanner) {
assertEquals(result.size(), 1); assertEquals(result.size(), 1);
assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[expectedIndex])); Cell c = result.rawCells()[0];
assertTrue(Bytes.equals(result.rawCells()[0].getQualifier(), QUALIFIERS[expectedIndex])); assertTrue(Bytes.equals(c.getRowArray(), c.getRowOffset(), c.getRowLength(),
ROWS[expectedIndex], 0, ROWS[expectedIndex].length));
assertTrue(Bytes.equals(c.getQualifierArray(), c.getQualifierOffset(),
c.getQualifierLength(), QUALIFIERS[expectedIndex], 0, QUALIFIERS[expectedIndex].length));
expectedIndex--; expectedIndex--;
} }
assertEquals(expectedIndex, 0); assertEquals(expectedIndex, 0);
@ -5817,7 +5820,7 @@ public class TestFromClientSide {
for (Result result : ht.getScanner(scan)) { for (Result result : ht.getScanner(scan)) {
assertEquals(result.size(), 1); assertEquals(result.size(), 1);
assertEquals(result.rawCells()[0].getValueLength(), Bytes.SIZEOF_INT); assertEquals(result.rawCells()[0].getValueLength(), Bytes.SIZEOF_INT);
assertEquals(Bytes.toInt(result.rawCells()[0].getValue()), VALUE.length); assertEquals(Bytes.toInt(CellUtil.cloneValue(result.rawCells()[0])), VALUE.length);
count++; count++;
} }
assertEquals(count, 10); assertEquals(count, 10);
@ -6099,15 +6102,15 @@ public class TestFromClientSide {
result = scanner.next(); result = scanner.next();
assertTrue("Expected 2 keys but received " + result.size(), assertTrue("Expected 2 keys but received " + result.size(),
result.size() == 2); result.size() == 2);
assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[4])); assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[4]));
assertTrue(Bytes.equals(result.rawCells()[1].getRow(), ROWS[4])); assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[1]), ROWS[4]));
assertTrue(Bytes.equals(result.rawCells()[0].getValue(), VALUES[1])); assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[0]), VALUES[1]));
assertTrue(Bytes.equals(result.rawCells()[1].getValue(), VALUES[2])); assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[1]), VALUES[2]));
result = scanner.next(); result = scanner.next();
assertTrue("Expected 1 key but received " + result.size(), assertTrue("Expected 1 key but received " + result.size(),
result.size() == 1); result.size() == 1);
assertTrue(Bytes.equals(result.rawCells()[0].getRow(), ROWS[3])); assertTrue(Bytes.equals(CellUtil.cloneRow(result.rawCells()[0]), ROWS[3]));
assertTrue(Bytes.equals(result.rawCells()[0].getValue(), VALUES[0])); assertTrue(Bytes.equals(CellUtil.cloneValue(result.rawCells()[0]), VALUES[0]));
scanner.close(); scanner.close();
ht.close(); ht.close();
} }

View File

@ -27,6 +27,7 @@ import java.util.ConcurrentModificationException;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.testclassification.SmallTests;
@ -106,8 +107,8 @@ public class TestPutDeleteEtcCellIteration {
Cell cell = cellScanner.current(); Cell cell = cellScanner.current();
byte [] bytes = Bytes.toBytes(index++); byte [] bytes = Bytes.toBytes(index++);
KeyValue kv = (KeyValue)cell; KeyValue kv = (KeyValue)cell;
assertTrue(Bytes.equals(kv.getFamily(), bytes)); assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), bytes));
assertTrue(Bytes.equals(kv.getValue(), bytes)); assertTrue(Bytes.equals(CellUtil.cloneValue(kv), bytes));
} }
assertEquals(COUNT, index); assertEquals(COUNT, index);
} }
@ -125,8 +126,8 @@ public class TestPutDeleteEtcCellIteration {
int value = index; int value = index;
byte [] bytes = Bytes.toBytes(index++); byte [] bytes = Bytes.toBytes(index++);
KeyValue kv = (KeyValue)cell; KeyValue kv = (KeyValue)cell;
assertTrue(Bytes.equals(kv.getFamily(), bytes)); assertTrue(Bytes.equals(CellUtil.cloneFamily(kv), bytes));
long a = Bytes.toLong(kv.getValue()); long a = Bytes.toLong(CellUtil.cloneValue(kv));
assertEquals(value, a); assertEquals(value, a);
} }
assertEquals(COUNT, index); assertEquals(COUNT, index);

View File

@ -20,19 +20,19 @@
package org.apache.hadoop.hbase.coprocessor; package org.apache.hadoop.hbase.coprocessor;
import java.io.IOException; import java.io.IOException;
import java.util.List;
import java.util.Arrays; import java.util.Arrays;
import java.util.List;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey; import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WALKey;
/** /**
* Class for testing WALObserver coprocessor. * Class for testing WALObserver coprocessor.
@ -119,8 +119,8 @@ implements WALObserver {
Cell deletedCell = null; Cell deletedCell = null;
for (Cell cell : cells) { for (Cell cell : cells) {
// assume only one kv from the WALEdit matches. // assume only one kv from the WALEdit matches.
byte[] family = cell.getFamily(); byte[] family = CellUtil.cloneFamily(cell);
byte[] qulifier = cell.getQualifier(); byte[] qulifier = CellUtil.cloneQualifier(cell);
if (Arrays.equals(family, ignoredFamily) && if (Arrays.equals(family, ignoredFamily) &&
Arrays.equals(qulifier, ignoredQualifier)) { Arrays.equals(qulifier, ignoredQualifier)) {

View File

@ -66,10 +66,10 @@ import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile; import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.StoreFile.Reader; import org.apache.hadoop.hbase.regionserver.StoreFile.Reader;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey; import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.wal.WALKey;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
@ -221,13 +221,13 @@ public class SimpleRegionObserver extends BaseRegionObserver {
List<Mutation> metaEntries) throws IOException { List<Mutation> metaEntries) throws IOException {
ctPreSplitBeforePONR.incrementAndGet(); ctPreSplitBeforePONR.incrementAndGet();
} }
@Override @Override
public void preSplitAfterPONR( public void preSplitAfterPONR(
ObserverContext<RegionCoprocessorEnvironment> ctx) throws IOException { ObserverContext<RegionCoprocessorEnvironment> ctx) throws IOException {
ctPreSplitAfterPONR.incrementAndGet(); ctPreSplitAfterPONR.incrementAndGet();
} }
@Override @Override
public void postSplit(ObserverContext<RegionCoprocessorEnvironment> c, Region l, Region r) { public void postSplit(ObserverContext<RegionCoprocessorEnvironment> c, Region l, Region r) {
ctPostSplit.incrementAndGet(); ctPostSplit.incrementAndGet();
@ -370,7 +370,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
} }
@Override @Override
public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c, public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c,
final Put put, final WALEdit edit, final Put put, final WALEdit edit,
final Durability durability) throws IOException { final Durability durability) throws IOException {
Map<byte[], List<Cell>> familyMap = put.getFamilyCellMap(); Map<byte[], List<Cell>> familyMap = put.getFamilyCellMap();
@ -384,20 +384,23 @@ public class SimpleRegionObserver extends BaseRegionObserver {
assertNotNull(cells); assertNotNull(cells);
assertNotNull(cells.get(0)); assertNotNull(cells.get(0));
KeyValue kv = (KeyValue)cells.get(0); KeyValue kv = (KeyValue)cells.get(0);
assertTrue(Bytes.equals(kv.getQualifier(), assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
TestRegionObserverInterface.A)); kv.getQualifierLength(), TestRegionObserverInterface.A, 0,
TestRegionObserverInterface.A.length));
cells = familyMap.get(TestRegionObserverInterface.B); cells = familyMap.get(TestRegionObserverInterface.B);
assertNotNull(cells); assertNotNull(cells);
assertNotNull(cells.get(0)); assertNotNull(cells.get(0));
kv = (KeyValue)cells.get(0); kv = (KeyValue)cells.get(0);
assertTrue(Bytes.equals(kv.getQualifier(), assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
TestRegionObserverInterface.B)); kv.getQualifierLength(), TestRegionObserverInterface.B, 0,
TestRegionObserverInterface.B.length));
cells = familyMap.get(TestRegionObserverInterface.C); cells = familyMap.get(TestRegionObserverInterface.C);
assertNotNull(cells); assertNotNull(cells);
assertNotNull(cells.get(0)); assertNotNull(cells.get(0));
kv = (KeyValue)cells.get(0); kv = (KeyValue)cells.get(0);
assertTrue(Bytes.equals(kv.getQualifier(), assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
TestRegionObserverInterface.C)); kv.getQualifierLength(), TestRegionObserverInterface.C, 0,
TestRegionObserverInterface.C.length));
} }
ctPrePut.incrementAndGet(); ctPrePut.incrementAndGet();
} }
@ -418,25 +421,31 @@ public class SimpleRegionObserver extends BaseRegionObserver {
assertNotNull(cells.get(0)); assertNotNull(cells.get(0));
// KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO // KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO
KeyValue kv = (KeyValue)cells.get(0); KeyValue kv = (KeyValue)cells.get(0);
assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.A)); assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
kv.getQualifierLength(), TestRegionObserverInterface.A, 0,
TestRegionObserverInterface.A.length));
cells = familyMap.get(TestRegionObserverInterface.B); cells = familyMap.get(TestRegionObserverInterface.B);
assertNotNull(cells); assertNotNull(cells);
assertNotNull(cells.get(0)); assertNotNull(cells.get(0));
// KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO // KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO
kv = (KeyValue)cells.get(0); kv = (KeyValue)cells.get(0);
assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.B)); assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
kv.getQualifierLength(), TestRegionObserverInterface.B, 0,
TestRegionObserverInterface.B.length));
cells = familyMap.get(TestRegionObserverInterface.C); cells = familyMap.get(TestRegionObserverInterface.C);
assertNotNull(cells); assertNotNull(cells);
assertNotNull(cells.get(0)); assertNotNull(cells.get(0));
// KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO // KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO
kv = (KeyValue)cells.get(0); kv = (KeyValue)cells.get(0);
assertTrue(Bytes.equals(kv.getQualifier(), TestRegionObserverInterface.C)); assertTrue(Bytes.equals(kv.getQualifierArray(), kv.getQualifierOffset(),
kv.getQualifierLength(), TestRegionObserverInterface.C, 0,
TestRegionObserverInterface.C.length));
} }
ctPostPut.incrementAndGet(); ctPostPut.incrementAndGet();
} }
@Override @Override
public void preDelete(final ObserverContext<RegionCoprocessorEnvironment> c, public void preDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
final Delete delete, final WALEdit edit, final Delete delete, final WALEdit edit,
final Durability durability) throws IOException { final Durability durability) throws IOException {
Map<byte[], List<Cell>> familyMap = delete.getFamilyCellMap(); Map<byte[], List<Cell>> familyMap = delete.getFamilyCellMap();
@ -456,7 +465,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
} }
@Override @Override
public void postDelete(final ObserverContext<RegionCoprocessorEnvironment> c, public void postDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
final Delete delete, final WALEdit edit, final Delete delete, final WALEdit edit,
final Durability durability) throws IOException { final Durability durability) throws IOException {
Map<byte[], List<Cell>> familyMap = delete.getFamilyCellMap(); Map<byte[], List<Cell>> familyMap = delete.getFamilyCellMap();
@ -467,7 +476,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
ctBeforeDelete.set(0); ctBeforeDelete.set(0);
ctPostDeleted.incrementAndGet(); ctPostDeleted.incrementAndGet();
} }
@Override @Override
public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c, public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException { MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException {
@ -604,7 +613,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
} }
@Override @Override
public Result preAppendAfterRowLock(ObserverContext<RegionCoprocessorEnvironment> e, public Result preAppendAfterRowLock(ObserverContext<RegionCoprocessorEnvironment> e,
Append append) throws IOException { Append append) throws IOException {
ctPreAppendAfterRowLock.incrementAndGet(); ctPreAppendAfterRowLock.incrementAndGet();
return null; return null;
@ -724,7 +733,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
public boolean hadPostPut() { public boolean hadPostPut() {
return ctPostPut.get() > 0; return ctPostPut.get() > 0;
} }
public boolean hadPreBatchMutate() { public boolean hadPreBatchMutate() {
return ctPreBatchMutate.get() > 0; return ctPreBatchMutate.get() > 0;
} }
@ -784,7 +793,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
public boolean hadPreIncrement() { public boolean hadPreIncrement() {
return ctPreIncrement.get() > 0; return ctPreIncrement.get() > 0;
} }
public boolean hadPreIncrementAfterRowLock() { public boolean hadPreIncrementAfterRowLock() {
return ctPreIncrementAfterRowLock.get() > 0; return ctPreIncrementAfterRowLock.get() > 0;
} }
@ -808,7 +817,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
public boolean hadPrePreparedDeleteTS() { public boolean hadPrePreparedDeleteTS() {
return ctPrePrepareDeleteTS.get() > 0; return ctPrePrepareDeleteTS.get() > 0;
} }
public boolean hadPreWALRestore() { public boolean hadPreWALRestore() {
return ctPreWALRestore.get() > 0; return ctPreWALRestore.get() > 0;
} }
@ -874,7 +883,7 @@ public class SimpleRegionObserver extends BaseRegionObserver {
public int getCtPreSplit() { public int getCtPreSplit() {
return ctPreSplit.get(); return ctPreSplit.get();
} }
public int getCtPreSplitBeforePONR() { public int getCtPreSplitBeforePONR() {
return ctPreSplitBeforePONR.get(); return ctPreSplitBeforePONR.get();
} }

View File

@ -37,6 +37,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
@ -51,11 +52,6 @@ import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey; import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost; import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.wal.DefaultWALProvider;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests; import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MediumTests;
@ -63,14 +59,19 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdge; import org.apache.hadoop.hbase.util.EnvironmentEdge;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.wal.DefaultWALProvider;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.junit.After; import org.junit.After;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.Before; import org.junit.Before;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Rule; import org.junit.Rule;
import org.junit.Test; import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
/** /**
* Tests invocation of the * Tests invocation of the
@ -216,14 +217,14 @@ public class TestWALObserver {
List<Cell> cells = edit.getCells(); List<Cell> cells = edit.getCells();
for (Cell cell : cells) { for (Cell cell : cells) {
if (Arrays.equals(cell.getFamily(), TEST_FAMILY[0])) { if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[0])) {
foundFamily0 = true; foundFamily0 = true;
} }
if (Arrays.equals(cell.getFamily(), TEST_FAMILY[2])) { if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[2])) {
foundFamily2 = true; foundFamily2 = true;
} }
if (Arrays.equals(cell.getFamily(), TEST_FAMILY[1])) { if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[1])) {
if (!Arrays.equals(cell.getValue(), TEST_VALUE[1])) { if (!Arrays.equals(CellUtil.cloneValue(cell), TEST_VALUE[1])) {
modifiedFamily1 = true; modifiedFamily1 = true;
} }
} }
@ -244,14 +245,14 @@ public class TestWALObserver {
foundFamily2 = false; foundFamily2 = false;
modifiedFamily1 = false; modifiedFamily1 = false;
for (Cell cell : cells) { for (Cell cell : cells) {
if (Arrays.equals(cell.getFamily(), TEST_FAMILY[0])) { if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[0])) {
foundFamily0 = true; foundFamily0 = true;
} }
if (Arrays.equals(cell.getFamily(), TEST_FAMILY[2])) { if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[2])) {
foundFamily2 = true; foundFamily2 = true;
} }
if (Arrays.equals(cell.getFamily(), TEST_FAMILY[1])) { if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[1])) {
if (!Arrays.equals(cell.getValue(), TEST_VALUE[1])) { if (!Arrays.equals(CellUtil.cloneValue(cell), TEST_VALUE[1])) {
modifiedFamily1 = true; modifiedFamily1 = true;
} }
} }
@ -462,7 +463,7 @@ public class TestWALObserver {
/* /*
* Creates an HRI around an HTD that has <code>tableName</code> and three * Creates an HRI around an HTD that has <code>tableName</code> and three
* column families named. * column families named.
* *
* @param tableName Name of table to use when we create HTableDescriptor. * @param tableName Name of table to use when we create HTableDescriptor.
*/ */
private HRegionInfo createBasic3FamilyHRegionInfo(final String tableName) { private HRegionInfo createBasic3FamilyHRegionInfo(final String tableName) {
@ -496,7 +497,7 @@ public class TestWALObserver {
/** /**
* Copied from HRegion. * Copied from HRegion.
* *
* @param familyMap * @param familyMap
* map of family->edits * map of family->edits
* @param walEdit * @param walEdit

View File

@ -534,7 +534,7 @@ public class TestFilter {
ArrayList<Cell> values = new ArrayList<Cell>(); ArrayList<Cell> values = new ArrayList<Cell>();
boolean isMoreResults = scanner.next(values); boolean isMoreResults = scanner.next(values);
if (!isMoreResults if (!isMoreResults
|| !Bytes.toString(values.get(0).getRow()).startsWith(prefix)) { || !Bytes.toString(CellUtil.cloneRow(values.get(0))).startsWith(prefix)) {
Assert.assertTrue( Assert.assertTrue(
"The WhileMatchFilter should now filter all remaining", "The WhileMatchFilter should now filter all remaining",
filter.filterAllRemaining()); filter.filterAllRemaining());
@ -581,7 +581,7 @@ public class TestFilter {
/** /**
* The following filter simulates a pre-0.96 filter where filterRow() is defined while * The following filter simulates a pre-0.96 filter where filterRow() is defined while
* hasFilterRow() returns false * hasFilterRow() returns false
*/ */
static class OldTestFilter extends FilterBase { static class OldTestFilter extends FilterBase {
@ -592,25 +592,25 @@ public class TestFilter {
public boolean hasFilterRow() { public boolean hasFilterRow() {
return false; return false;
} }
@Override @Override
public boolean filterRow() { public boolean filterRow() {
// always filter out rows // always filter out rows
return true; return true;
} }
@Override @Override
public ReturnCode filterKeyValue(Cell ignored) throws IOException { public ReturnCode filterKeyValue(Cell ignored) throws IOException {
return ReturnCode.INCLUDE; return ReturnCode.INCLUDE;
} }
} }
/** /**
* The following test is to ensure old(such as hbase0.94) filterRow() can be correctly fired in * The following test is to ensure old(such as hbase0.94) filterRow() can be correctly fired in
* 0.96+ code base. * 0.96+ code base.
* *
* See HBASE-10366 * See HBASE-10366
* *
* @throws Exception * @throws Exception
*/ */
@Test @Test
@ -1558,7 +1558,7 @@ public class TestFilter {
}; };
for(KeyValue kv : srcKVs) { for(KeyValue kv : srcKVs) {
Put put = new Put(kv.getRow()).add(kv); Put put = new Put(CellUtil.cloneRow(kv)).add(kv);
put.setDurability(Durability.SKIP_WAL); put.setDurability(Durability.SKIP_WAL);
this.region.put(put); this.region.put(put);
} }
@ -1597,7 +1597,7 @@ public class TestFilter {
// Add QUALIFIERS_ONE[1] to ROWS_THREE[0] with VALUES[0] // Add QUALIFIERS_ONE[1] to ROWS_THREE[0] with VALUES[0]
KeyValue kvA = new KeyValue(ROWS_THREE[0], FAMILIES[0], QUALIFIERS_ONE[1], VALUES[0]); KeyValue kvA = new KeyValue(ROWS_THREE[0], FAMILIES[0], QUALIFIERS_ONE[1], VALUES[0]);
this.region.put(new Put(kvA.getRow()).add(kvA)); this.region.put(new Put(CellUtil.cloneRow(kvA)).add(kvA));
// Match VALUES[1] against QUALIFIERS_ONE[1] with filterIfMissing = true // Match VALUES[1] against QUALIFIERS_ONE[1] with filterIfMissing = true
// Expect 1 row (3) // Expect 1 row (3)
@ -1971,7 +1971,7 @@ public class TestFilter {
verifyScanFullNoValues(s, expectedKVs, useLen); verifyScanFullNoValues(s, expectedKVs, useLen);
} }
} }
/** /**
* Filter which makes sleeps for a second between each row of a scan. * Filter which makes sleeps for a second between each row of a scan.
* This can be useful for manual testing of bugs like HBASE-5973. For example: * This can be useful for manual testing of bugs like HBASE-5973. For example:
@ -1984,7 +1984,7 @@ public class TestFilter {
*/ */
public static class SlowScanFilter extends FilterBase { public static class SlowScanFilter extends FilterBase {
private static Thread ipcHandlerThread = null; private static Thread ipcHandlerThread = null;
@Override @Override
public byte [] toByteArray() {return null;} public byte [] toByteArray() {return null;}
@ -2099,5 +2099,5 @@ public class TestFilter {
WAL wal = ((HRegion)testRegion).getWAL(); WAL wal = ((HRegion)testRegion).getWAL();
((HRegion)testRegion).close(); ((HRegion)testRegion).close();
wal.close(); wal.close();
} }
} }

View File

@ -266,7 +266,7 @@ public class TestFilterList {
byte[] r1 = Bytes.toBytes("Row1"); byte[] r1 = Bytes.toBytes("Row1");
byte[] r11 = Bytes.toBytes("Row11"); byte[] r11 = Bytes.toBytes("Row11");
byte[] r2 = Bytes.toBytes("Row2"); byte[] r2 = Bytes.toBytes("Row2");
FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ONE); FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ONE);
flist.addFilter(new PrefixFilter(r1)); flist.addFilter(new PrefixFilter(r1));
flist.filterRowKey(KeyValueUtil.createFirstOnRow(r1)); flist.filterRowKey(KeyValueUtil.createFirstOnRow(r1));
@ -276,7 +276,7 @@ public class TestFilterList {
flist.reset(); flist.reset();
flist.filterRowKey(KeyValueUtil.createFirstOnRow(r2)); flist.filterRowKey(KeyValueUtil.createFirstOnRow(r2));
assertEquals(flist.filterKeyValue(new KeyValue(r2,r2,r2)), ReturnCode.SKIP); assertEquals(flist.filterKeyValue(new KeyValue(r2,r2,r2)), ReturnCode.SKIP);
flist = new FilterList(FilterList.Operator.MUST_PASS_ONE); flist = new FilterList(FilterList.Operator.MUST_PASS_ONE);
flist.addFilter(new AlwaysNextColFilter()); flist.addFilter(new AlwaysNextColFilter());
flist.addFilter(new PrefixFilter(r1)); flist.addFilter(new PrefixFilter(r1));
@ -298,7 +298,7 @@ public class TestFilterList {
byte[] r1 = Bytes.toBytes("Row1"); byte[] r1 = Bytes.toBytes("Row1");
byte[] r11 = Bytes.toBytes("Row11"); byte[] r11 = Bytes.toBytes("Row11");
byte[] r2 = Bytes.toBytes("Row2"); byte[] r2 = Bytes.toBytes("Row2");
FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ONE); FilterList flist = new FilterList(FilterList.Operator.MUST_PASS_ONE);
flist.addFilter(new AlwaysNextColFilter()); flist.addFilter(new AlwaysNextColFilter());
flist.addFilter(new InclusiveStopFilter(r1)); flist.addFilter(new InclusiveStopFilter(r1));
@ -390,7 +390,7 @@ public class TestFilterList {
Arrays.asList(new Filter[] { includeFilter, alternateIncludeFilter, alternateFilter })); Arrays.asList(new Filter[] { includeFilter, alternateIncludeFilter, alternateFilter }));
// INCLUDE, INCLUDE, INCLUDE_AND_NEXT_COL. // INCLUDE, INCLUDE, INCLUDE_AND_NEXT_COL.
assertEquals(Filter.ReturnCode.INCLUDE_AND_NEXT_COL, mpOnefilterList.filterKeyValue(null)); assertEquals(Filter.ReturnCode.INCLUDE_AND_NEXT_COL, mpOnefilterList.filterKeyValue(null));
// INCLUDE, SKIP, INCLUDE. // INCLUDE, SKIP, INCLUDE.
assertEquals(Filter.ReturnCode.INCLUDE, mpOnefilterList.filterKeyValue(null)); assertEquals(Filter.ReturnCode.INCLUDE, mpOnefilterList.filterKeyValue(null));
// Check must pass all filter. // Check must pass all filter.
@ -398,7 +398,7 @@ public class TestFilterList {
Arrays.asList(new Filter[] { includeFilter, alternateIncludeFilter, alternateFilter })); Arrays.asList(new Filter[] { includeFilter, alternateIncludeFilter, alternateFilter }));
// INCLUDE, INCLUDE, INCLUDE_AND_NEXT_COL. // INCLUDE, INCLUDE, INCLUDE_AND_NEXT_COL.
assertEquals(Filter.ReturnCode.INCLUDE_AND_NEXT_COL, mpAllfilterList.filterKeyValue(null)); assertEquals(Filter.ReturnCode.INCLUDE_AND_NEXT_COL, mpAllfilterList.filterKeyValue(null));
// INCLUDE, SKIP, INCLUDE. // INCLUDE, SKIP, INCLUDE.
assertEquals(Filter.ReturnCode.SKIP, mpAllfilterList.filterKeyValue(null)); assertEquals(Filter.ReturnCode.SKIP, mpAllfilterList.filterKeyValue(null));
} }
@ -417,7 +417,7 @@ public class TestFilterList {
public byte [] toByteArray() { public byte [] toByteArray() {
return null; return null;
} }
@Override @Override
public ReturnCode filterKeyValue(Cell ignored) throws IOException { public ReturnCode filterKeyValue(Cell ignored) throws IOException {
return ReturnCode.INCLUDE; return ReturnCode.INCLUDE;
@ -541,12 +541,13 @@ public class TestFilterList {
// Value for fam:qual1 should be stripped: // Value for fam:qual1 should be stripped:
assertEquals(Filter.ReturnCode.INCLUDE, flist.filterKeyValue(kvQual1)); assertEquals(Filter.ReturnCode.INCLUDE, flist.filterKeyValue(kvQual1));
final KeyValue transformedQual1 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual1)); final KeyValue transformedQual1 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual1));
assertEquals(0, transformedQual1.getValue().length); assertEquals(0, transformedQual1.getValueLength());
// Value for fam:qual2 should not be stripped: // Value for fam:qual2 should not be stripped:
assertEquals(Filter.ReturnCode.INCLUDE, flist.filterKeyValue(kvQual2)); assertEquals(Filter.ReturnCode.INCLUDE, flist.filterKeyValue(kvQual2));
final KeyValue transformedQual2 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual2)); final KeyValue transformedQual2 = KeyValueUtil.ensureKeyValue(flist.transformCell(kvQual2));
assertEquals("value", Bytes.toString(transformedQual2.getValue())); assertEquals("value", Bytes.toString(transformedQual2.getValueArray(),
transformedQual2.getValueOffset(), transformedQual2.getValueLength()));
// Other keys should be skipped: // Other keys should be skipped:
assertEquals(Filter.ReturnCode.SKIP, flist.filterKeyValue(kvQual3)); assertEquals(Filter.ReturnCode.SKIP, flist.filterKeyValue(kvQual3));

View File

@ -32,6 +32,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.KeyValueUtil;
@ -102,7 +103,7 @@ public class TestHalfStoreFileReader {
HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf); HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf);
r.loadFileInfo(); r.loadFileInfo();
Cell midKV = r.midkey(); Cell midKV = r.midkey();
byte[] midkey = ((KeyValue.KeyOnlyKeyValue)midKV).getRow(); byte[] midkey = CellUtil.cloneRow(midKV);
//System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey)); //System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));
@ -167,7 +168,7 @@ public class TestHalfStoreFileReader {
HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf); HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf);
r.loadFileInfo(); r.loadFileInfo();
Cell midKV = r.midkey(); Cell midKV = r.midkey();
byte[] midkey = ((KeyValue.KeyOnlyKeyValue)midKV).getRow(); byte[] midkey = CellUtil.cloneRow(midKV);
Reference bottom = new Reference(midkey, Reference.Range.bottom); Reference bottom = new Reference(midkey, Reference.Range.bottom);
Reference top = new Reference(midkey, Reference.Range.top); Reference top = new Reference(midkey, Reference.Range.top);

View File

@ -37,6 +37,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.Tag;
@ -74,7 +75,7 @@ public class TestPrefixTreeEncoding {
CellComparator.COMPARATOR); CellComparator.COMPARATOR);
private static boolean formatRowNum = false; private static boolean formatRowNum = false;
@Parameters @Parameters
public static Collection<Object[]> parameters() { public static Collection<Object[]> parameters() {
List<Object[]> paramList = new ArrayList<Object[]>(); List<Object[]> paramList = new ArrayList<Object[]>();
@ -88,7 +89,7 @@ public class TestPrefixTreeEncoding {
public TestPrefixTreeEncoding(boolean includesTag) { public TestPrefixTreeEncoding(boolean includesTag) {
this.includesTag = includesTag; this.includesTag = includesTag;
} }
@Before @Before
public void setUp() throws Exception { public void setUp() throws Exception {
kvset.clear(); kvset.clear();
@ -132,7 +133,8 @@ public class TestPrefixTreeEncoding {
new KeyValue.KeyOnlyKeyValue(seekKey.getBuffer(), seekKey.getKeyOffset(), seekKey new KeyValue.KeyOnlyKeyValue(seekKey.getBuffer(), seekKey.getKeyOffset(), seekKey
.getKeyLength()), true); .getKeyLength()), true);
assertNotNull(seeker.getKeyValue()); assertNotNull(seeker.getKeyValue());
assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1), seeker.getKeyValue().getRow()); assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1),
CellUtil.cloneRow(seeker.getKeyValue()));
// Seek before the last keyvalue; // Seek before the last keyvalue;
seekKey = KeyValueUtil.createFirstDeleteFamilyOnRow(Bytes.toBytes("zzzz"), CF_BYTES); seekKey = KeyValueUtil.createFirstDeleteFamilyOnRow(Bytes.toBytes("zzzz"), CF_BYTES);
@ -140,7 +142,8 @@ public class TestPrefixTreeEncoding {
new KeyValue.KeyOnlyKeyValue(seekKey.getBuffer(), seekKey.getKeyOffset(), seekKey new KeyValue.KeyOnlyKeyValue(seekKey.getBuffer(), seekKey.getKeyOffset(), seekKey
.getKeyLength()), true); .getKeyLength()), true);
assertNotNull(seeker.getKeyValue()); assertNotNull(seeker.getKeyValue());
assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1), seeker.getKeyValue().getRow()); assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1),
CellUtil.cloneRow(seeker.getKeyValue()));
} }
@Test @Test
@ -226,7 +229,7 @@ public class TestPrefixTreeEncoding {
onDiskBytes.length - DataBlockEncoding.ID_SIZE); onDiskBytes.length - DataBlockEncoding.ID_SIZE);
verifySeeking(seeker, readBuffer, batchId); verifySeeking(seeker, readBuffer, batchId);
} }
private void verifySeeking(EncodedSeeker encodeSeeker, private void verifySeeking(EncodedSeeker encodeSeeker,
ByteBuffer encodedData, int batchId) { ByteBuffer encodedData, int batchId) {
List<KeyValue> kvList = new ArrayList<KeyValue>(); List<KeyValue> kvList = new ArrayList<KeyValue>();

View File

@ -18,9 +18,6 @@
*/ */
package org.apache.hadoop.hbase.io.hfile; package org.apache.hadoop.hbase.io.hfile;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.DataInput; import java.io.DataInput;
import java.io.DataOutput; import java.io.DataOutput;
import java.io.IOException; import java.io.IOException;
@ -242,7 +239,7 @@ public class TestHFile extends HBaseTestCase {
/** /**
* test none codecs * test none codecs
* @param useTags * @param useTags
*/ */
void basicWithSomeCodec(String codec, boolean useTags) throws IOException { void basicWithSomeCodec(String codec, boolean useTags) throws IOException {
if (useTags) { if (useTags) {
@ -311,12 +308,12 @@ public class TestHFile extends HBaseTestCase {
writer.appendMetaBlock("HFileMeta" + i, new Writable() { writer.appendMetaBlock("HFileMeta" + i, new Writable() {
private int val; private int val;
public Writable setVal(int val) { this.val = val; return this; } public Writable setVal(int val) { this.val = val; return this; }
@Override @Override
public void write(DataOutput out) throws IOException { public void write(DataOutput out) throws IOException {
out.write(("something to test" + val).getBytes()); out.write(("something to test" + val).getBytes());
} }
@Override @Override
public void readFields(DataInput in) throws IOException { } public void readFields(DataInput in) throws IOException { }
}.setVal(i)); }.setVal(i));
@ -330,7 +327,7 @@ public class TestHFile extends HBaseTestCase {
private void readNumMetablocks(Reader reader, int n) throws IOException { private void readNumMetablocks(Reader reader, int n) throws IOException {
for (int i = 0; i < n; i++) { for (int i = 0; i < n; i++) {
ByteBuffer actual = reader.getMetaBlock("HFileMeta" + i, false); ByteBuffer actual = reader.getMetaBlock("HFileMeta" + i, false);
ByteBuffer expected = ByteBuffer expected =
ByteBuffer.wrap(("something to test" + i).getBytes()); ByteBuffer.wrap(("something to test" + i).getBytes());
assertEquals("failed to match metadata", assertEquals("failed to match metadata",
Bytes.toStringBinary(expected), Bytes.toStringBinary(actual)); Bytes.toStringBinary(expected), Bytes.toStringBinary(actual));
@ -377,7 +374,7 @@ public class TestHFile extends HBaseTestCase {
@Test @Test
public void testNullMetaBlocks() throws Exception { public void testNullMetaBlocks() throws Exception {
if (cacheConf == null) cacheConf = new CacheConfig(conf); if (cacheConf == null) cacheConf = new CacheConfig(conf);
for (Compression.Algorithm compressAlgo : for (Compression.Algorithm compressAlgo :
HBaseTestingUtility.COMPRESSION_ALGORITHMS) { HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile"); Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile");
FSDataOutputStream fout = createFSOutput(mFile); FSDataOutputStream fout = createFSOutput(mFile);
@ -512,8 +509,8 @@ public class TestHFile extends HBaseTestCase {
newKey = HFileWriterImpl.getMidpoint(keyComparator, kv1, kv2); newKey = HFileWriterImpl.getMidpoint(keyComparator, kv1, kv2);
assertTrue(keyComparator.compare(kv1, newKey) < 0); assertTrue(keyComparator.compare(kv1, newKey) < 0);
assertTrue((keyComparator.compare(kv2, newKey)) > 0); assertTrue((keyComparator.compare(kv2, newKey)) > 0);
assertTrue(Arrays.equals(newKey.getFamily(), family)); assertTrue(Arrays.equals(CellUtil.cloneFamily(newKey), family));
assertTrue(Arrays.equals(newKey.getQualifier(), qualB)); assertTrue(Arrays.equals(CellUtil.cloneQualifier(newKey), qualB));
assertTrue(newKey.getTimestamp() == HConstants.LATEST_TIMESTAMP); assertTrue(newKey.getTimestamp() == HConstants.LATEST_TIMESTAMP);
assertTrue(newKey.getTypeByte() == Type.Maximum.getCode()); assertTrue(newKey.getTypeByte() == Type.Maximum.getCode());

View File

@ -48,6 +48,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
@ -131,7 +132,7 @@ public class TestHFileBlock {
// generate it or repeat, it should compress well // generate it or repeat, it should compress well
if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) { if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
row = keyValues.get(randomizer.nextInt(keyValues.size())).getRow(); row = CellUtil.cloneRow(keyValues.get(randomizer.nextInt(keyValues.size())));
} else { } else {
row = new byte[FIELD_LENGTH]; row = new byte[FIELD_LENGTH];
randomizer.nextBytes(row); randomizer.nextBytes(row);
@ -140,17 +141,16 @@ public class TestHFileBlock {
family = new byte[FIELD_LENGTH]; family = new byte[FIELD_LENGTH];
randomizer.nextBytes(family); randomizer.nextBytes(family);
} else { } else {
family = keyValues.get(0).getFamily(); family = CellUtil.cloneFamily(keyValues.get(0));
} }
if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) { if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
qualifier = keyValues.get( qualifier = CellUtil.cloneQualifier(keyValues.get(randomizer.nextInt(keyValues.size())));
randomizer.nextInt(keyValues.size())).getQualifier();
} else { } else {
qualifier = new byte[FIELD_LENGTH]; qualifier = new byte[FIELD_LENGTH];
randomizer.nextBytes(qualifier); randomizer.nextBytes(qualifier);
} }
if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) { if (0 < i && randomizer.nextFloat() < CHANCE_TO_REPEAT) {
value = keyValues.get(randomizer.nextInt(keyValues.size())).getValue(); value = CellUtil.cloneValue(keyValues.get(randomizer.nextInt(keyValues.size())));
} else { } else {
value = new byte[FIELD_LENGTH]; value = new byte[FIELD_LENGTH];
randomizer.nextBytes(value); randomizer.nextBytes(value);
@ -837,7 +837,7 @@ public class TestHFileBlock {
.withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM) .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
.withChecksumType(ChecksumType.NULL).build(); .withChecksumType(ChecksumType.NULL).build();
HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, buf, HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, buf,
HFileBlock.FILL_HEADER, -1, HFileBlock.FILL_HEADER, -1,
0, meta); 0, meta);
long byteBufferExpectedSize = long byteBufferExpectedSize =
ClassSize.align(ClassSize.estimateBase(buf.getClass(), true) ClassSize.align(ClassSize.estimateBase(buf.getClass(), true)

View File

@ -480,7 +480,7 @@ public class TestHFileBlockCompatibility {
this.dataBlockEncoder.encode(kv, dataBlockEncodingCtx, this.userDataStream); this.dataBlockEncoder.encode(kv, dataBlockEncodingCtx, this.userDataStream);
this.unencodedDataSizeWritten += kv.getLength(); this.unencodedDataSizeWritten += kv.getLength();
if (dataBlockEncodingCtx.getHFileContext().isIncludesMvcc()) { if (dataBlockEncodingCtx.getHFileContext().isIncludesMvcc()) {
this.unencodedDataSizeWritten += WritableUtils.getVIntSize(kv.getMvccVersion()); this.unencodedDataSizeWritten += WritableUtils.getVIntSize(kv.getSequenceId());
} }
} }

View File

@ -125,7 +125,7 @@ public class TestHFileWriterV2 {
writer.appendMetaBlock("CAPITAL_OF_FRANCE", new Text("Paris")); writer.appendMetaBlock("CAPITAL_OF_FRANCE", new Text("Paris"));
writer.close(); writer.close();
FSDataInputStream fsdis = fs.open(hfilePath); FSDataInputStream fsdis = fs.open(hfilePath);
@ -144,7 +144,7 @@ public class TestHFileWriterV2 {
.withIncludesTags(false) .withIncludesTags(false)
.withCompression(compressAlgo) .withCompression(compressAlgo)
.build(); .build();
HFileBlock.FSReader blockReader = new HFileBlock.FSReaderImpl(fsdis, fileSize, meta); HFileBlock.FSReader blockReader = new HFileBlock.FSReaderImpl(fsdis, fileSize, meta);
// Comparator class name is stored in the trailer in version 2. // Comparator class name is stored in the trailer in version 2.
CellComparator comparator = trailer.createComparator(); CellComparator comparator = trailer.createComparator();
@ -162,12 +162,12 @@ public class TestHFileWriterV2 {
dataBlockIndexReader.readMultiLevelIndexRoot( dataBlockIndexReader.readMultiLevelIndexRoot(
blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX), blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX),
trailer.getDataIndexCount()); trailer.getDataIndexCount());
if (findMidKey) { if (findMidKey) {
Cell midkey = dataBlockIndexReader.midkey(); Cell midkey = dataBlockIndexReader.midkey();
assertNotNull("Midkey should not be null", midkey); assertNotNull("Midkey should not be null", midkey);
} }
// Meta index. // Meta index.
metaBlockIndexReader.readRootIndex( metaBlockIndexReader.readRootIndex(
blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX) blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX)
@ -215,8 +215,10 @@ public class TestHFileWriterV2 {
} }
// A brute-force check to see that all keys and values are correct. // A brute-force check to see that all keys and values are correct.
assertTrue(Bytes.compareTo(key, keyValues.get(entriesRead).getKey()) == 0); KeyValue kv = keyValues.get(entriesRead);
assertTrue(Bytes.compareTo(value, keyValues.get(entriesRead).getValue()) == 0); assertTrue(Bytes.compareTo(key, kv.getKey()) == 0);
assertTrue(Bytes.compareTo(value, 0, value.length, kv.getValueArray(), kv.getValueOffset(),
kv.getValueLength()) == 0);
++entriesRead; ++entriesRead;
} }

View File

@ -44,7 +44,6 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo; import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
@ -160,7 +159,7 @@ public class TestHFileWriterV3 {
writer.appendMetaBlock("CAPITAL_OF_FRANCE", new Text("Paris")); writer.appendMetaBlock("CAPITAL_OF_FRANCE", new Text("Paris"));
writer.close(); writer.close();
FSDataInputStream fsdis = fs.open(hfilePath); FSDataInputStream fsdis = fs.open(hfilePath);
@ -192,12 +191,12 @@ public class TestHFileWriterV3 {
// the root level. // the root level.
dataBlockIndexReader.readMultiLevelIndexRoot( dataBlockIndexReader.readMultiLevelIndexRoot(
blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX), trailer.getDataIndexCount()); blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX), trailer.getDataIndexCount());
if (findMidKey) { if (findMidKey) {
Cell midkey = dataBlockIndexReader.midkey(); Cell midkey = dataBlockIndexReader.midkey();
assertNotNull("Midkey should not be null", midkey); assertNotNull("Midkey should not be null", midkey);
} }
// Meta index. // Meta index.
metaBlockIndexReader.readRootIndex( metaBlockIndexReader.readRootIndex(
blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX) blockIter.nextBlockWithBlockType(BlockType.ROOT_INDEX)
@ -240,7 +239,7 @@ public class TestHFileWriterV3 {
tagValue = new byte[tagLen]; tagValue = new byte[tagLen];
buf.get(tagValue); buf.get(tagValue);
} }
if (includeMemstoreTS) { if (includeMemstoreTS) {
ByteArrayInputStream byte_input = new ByteArrayInputStream(buf.array(), buf.arrayOffset() ByteArrayInputStream byte_input = new ByteArrayInputStream(buf.array(), buf.arrayOffset()
+ buf.position(), buf.remaining()); + buf.position(), buf.remaining());
@ -251,11 +250,13 @@ public class TestHFileWriterV3 {
} }
// A brute-force check to see that all keys and values are correct. // A brute-force check to see that all keys and values are correct.
assertTrue(Bytes.compareTo(key, keyValues.get(entriesRead).getKey()) == 0); KeyValue kv = keyValues.get(entriesRead);
assertTrue(Bytes.compareTo(value, keyValues.get(entriesRead).getValue()) == 0); assertTrue(Bytes.compareTo(key, kv.getKey()) == 0);
assertTrue(Bytes.compareTo(value, 0, value.length, kv.getValueArray(), kv.getValueOffset(),
kv.getValueLength()) == 0);
if (useTags) { if (useTags) {
assertNotNull(tagValue); assertNotNull(tagValue);
KeyValue tkv = keyValues.get(entriesRead); KeyValue tkv = kv;
assertEquals(tagValue.length, tkv.getTagsLength()); assertEquals(tagValue.length, tkv.getTagsLength());
assertTrue(Bytes.compareTo(tagValue, 0, tagValue.length, tkv.getTagsArray(), assertTrue(Bytes.compareTo(tagValue, 0, tagValue.length, tkv.getTagsArray(),
tkv.getTagsOffset(), tkv.getTagsLength()) == 0); tkv.getTagsOffset(), tkv.getTagsLength()) == 0);

View File

@ -70,11 +70,11 @@ public class TestSeekTo {
return paramList; return paramList;
} }
static boolean switchKVs = false; static boolean switchKVs = false;
public TestSeekTo(DataBlockEncoding encoding) { public TestSeekTo(DataBlockEncoding encoding) {
this.encoding = encoding; this.encoding = encoding;
} }
@Before @Before
public void setUp() { public void setUp() {
//reset //reset
@ -107,7 +107,8 @@ public class TestSeekTo {
} }
} }
static String toRowStr(Cell kv) { static String toRowStr(Cell kv) {
return Bytes.toString(KeyValueUtil.ensureKeyValue(kv).getRow()); KeyValue c = KeyValueUtil.ensureKeyValue(kv);
return Bytes.toString(c.getRowArray(), c.getRowOffset(), c.getRowLength());
} }
Path makeNewFile(TagUsage tagUsage) throws IOException { Path makeNewFile(TagUsage tagUsage) throws IOException {
@ -338,7 +339,7 @@ public class TestSeekTo {
Configuration conf = TEST_UTIL.getConfiguration(); Configuration conf = TEST_UTIL.getConfiguration();
HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf); HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), conf);
reader.loadFileInfo(); reader.loadFileInfo();
HFileBlockIndex.BlockIndexReader blockIndexReader = HFileBlockIndex.BlockIndexReader blockIndexReader =
reader.getDataBlockIndexReader(); reader.getDataBlockIndexReader();
System.out.println(blockIndexReader.toString()); System.out.println(blockIndexReader.toString());
// falls before the start of the file. // falls before the start of the file.

View File

@ -34,8 +34,6 @@ import java.util.Random;
import java.util.Set; import java.util.Set;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import junit.framework.Assert;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
@ -91,6 +89,8 @@ import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import org.mockito.Mockito; import org.mockito.Mockito;
import junit.framework.Assert;
/** /**
* Simple test for {@link KeyValueSortReducer} and {@link HFileOutputFormat}. * Simple test for {@link KeyValueSortReducer} and {@link HFileOutputFormat}.
* Sets up and runs a mapreduce job that writes hfile output. * Sets up and runs a mapreduce job that writes hfile output.
@ -201,8 +201,11 @@ public class TestHFileOutputFormat {
KeyValue original = kv.clone(); KeyValue original = kv.clone();
writer.write(new ImmutableBytesWritable(), kv); writer.write(new ImmutableBytesWritable(), kv);
assertFalse(original.equals(kv)); assertFalse(original.equals(kv));
assertTrue(Bytes.equals(original.getRow(), kv.getRow())); assertTrue(Bytes.equals(original.getRowArray(), original.getRowOffset(),
assertTrue(CellUtil.matchingColumn(original, kv.getFamily(), kv.getQualifier())); original.getRowLength(), kv.getRowArray(), kv.getRowOffset(), kv.getRowLength()));
assertTrue(CellUtil.matchingColumn(original, kv.getFamilyArray(), kv.getFamilyOffset(),
kv.getFamilyLength(), kv.getQualifierArray(), kv.getQualifierOffset(),
kv.getQualifierLength()));
assertNotSame(original.getTimestamp(), kv.getTimestamp()); assertNotSame(original.getTimestamp(), kv.getTimestamp());
assertNotSame(HConstants.LATEST_TIMESTAMP, kv.getTimestamp()); assertNotSame(HConstants.LATEST_TIMESTAMP, kv.getTimestamp());

View File

@ -344,8 +344,8 @@ public class TestImportExport {
assertEquals(now, res[6].getTimestamp()); assertEquals(now, res[6].getTimestamp());
t.close(); t.close();
} }
@Test @Test
public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Exception { public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Exception {
TableName EXPORT_TABLE = TableName EXPORT_TABLE =
@ -376,8 +376,8 @@ public class TestImportExport {
//Add second Delete family marker //Add second Delete family marker
d = new Delete(ROW1, now+7); d = new Delete(ROW1, now+7);
exportT.delete(d); exportT.delete(d);
String[] args = new String[] { String[] args = new String[] {
"-D" + Export.RAW_SCAN + "=true", EXPORT_TABLE.getNameAsString(), "-D" + Export.RAW_SCAN + "=true", EXPORT_TABLE.getNameAsString(),
FQ_OUTPUT_DIR, FQ_OUTPUT_DIR,
@ -403,10 +403,10 @@ public class TestImportExport {
Scan s = new Scan(); Scan s = new Scan();
s.setMaxVersions(); s.setMaxVersions();
s.setRaw(true); s.setRaw(true);
ResultScanner importedTScanner = importT.getScanner(s); ResultScanner importedTScanner = importT.getScanner(s);
Result importedTResult = importedTScanner.next(); Result importedTResult = importedTScanner.next();
ResultScanner exportedTScanner = exportT.getScanner(s); ResultScanner exportedTScanner = exportT.getScanner(s);
Result exportedTResult = exportedTScanner.next(); Result exportedTResult = exportedTScanner.next();
try try
@ -504,7 +504,7 @@ public class TestImportExport {
results.close(); results.close();
return count; return count;
} }
/** /**
* test main method. Import should print help and call System.exit * test main method. Import should print help and call System.exit
*/ */
@ -586,7 +586,7 @@ public class TestImportExport {
ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0]; ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
KeyValue key = (KeyValue) invocation.getArguments()[1]; KeyValue key = (KeyValue) invocation.getArguments()[1];
assertEquals("Key", Bytes.toString(writer.get())); assertEquals("Key", Bytes.toString(writer.get()));
assertEquals("row", Bytes.toString(key.getRow())); assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
return null; return null;
} }
}).when(ctx).write(any(ImmutableBytesWritable.class), any(KeyValue.class)); }).when(ctx).write(any(ImmutableBytesWritable.class), any(KeyValue.class));
@ -616,7 +616,7 @@ public class TestImportExport {
args.add("param2"); args.add("param2");
Import.addFilterAndArguments(configuration, FilterBase.class, args); Import.addFilterAndArguments(configuration, FilterBase.class, args);
assertEquals("org.apache.hadoop.hbase.filter.FilterBase", assertEquals("org.apache.hadoop.hbase.filter.FilterBase",
configuration.get(Import.FILTER_CLASS_CONF_KEY)); configuration.get(Import.FILTER_CLASS_CONF_KEY));
assertEquals("param1,param2", configuration.get(Import.FILTER_ARGS_CONF_KEY)); assertEquals("param1,param2", configuration.get(Import.FILTER_ARGS_CONF_KEY));
} }
@ -700,5 +700,5 @@ public class TestImportExport {
public boolean isWALVisited() { public boolean isWALVisited() {
return isVisited; return isVisited;
} }
} }
} }

View File

@ -33,11 +33,11 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
@ -45,13 +45,13 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.WALPlayer.WALKeyValueMapper; import org.apache.hadoop.hbase.mapreduce.WALPlayer.WALKeyValueMapper;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests; import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.LauncherSecurityManager; import org.apache.hadoop.hbase.util.LauncherSecurityManager;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Mapper.Context; import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;
@ -123,7 +123,7 @@ public class TestWALPlayer {
new String[] {walInputDir, TABLENAME1.getNameAsString(), new String[] {walInputDir, TABLENAME1.getNameAsString(),
TABLENAME2.getNameAsString() })); TABLENAME2.getNameAsString() }));
// verify the WAL was player into table 2 // verify the WAL was player into table 2
Get g = new Get(ROW); Get g = new Get(ROW);
Result r = t2.get(g); Result r = t2.get(g);
@ -151,15 +151,13 @@ public class TestWALPlayer {
WALKey key = mock(WALKey.class); WALKey key = mock(WALKey.class);
when(key.getTablename()).thenReturn(TableName.valueOf("table")); when(key.getTablename()).thenReturn(TableName.valueOf("table"));
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
Mapper<WALKey, WALEdit, ImmutableBytesWritable, KeyValue>.Context context = Mapper<WALKey, WALEdit, ImmutableBytesWritable, KeyValue>.Context context = mock(Context.class);
mock(Context.class);
when(context.getConfiguration()).thenReturn(configuration); when(context.getConfiguration()).thenReturn(configuration);
WALEdit value = mock(WALEdit.class); WALEdit value = mock(WALEdit.class);
ArrayList<Cell> values = new ArrayList<Cell>(); ArrayList<Cell> values = new ArrayList<Cell>();
KeyValue kv1 = mock(KeyValue.class); KeyValue kv1 = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), null);
when(kv1.getFamily()).thenReturn(Bytes.toBytes("family"));
when(kv1.getRow()).thenReturn(Bytes.toBytes("row"));
values.add(kv1); values.add(kv1);
when(value.getCells()).thenReturn(values); when(value.getCells()).thenReturn(values);
mapper.setup(context); mapper.setup(context);
@ -171,7 +169,7 @@ public class TestWALPlayer {
ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0]; ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
KeyValue key = (KeyValue) invocation.getArguments()[1]; KeyValue key = (KeyValue) invocation.getArguments()[1];
assertEquals("row", Bytes.toString(writer.get())); assertEquals("row", Bytes.toString(writer.get()));
assertEquals("row", Bytes.toString(key.getRow())); assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
return null; return null;
} }
}).when(context).write(any(ImmutableBytesWritable.class), any(KeyValue.class)); }).when(context).write(any(ImmutableBytesWritable.class), any(KeyValue.class));

View File

@ -100,7 +100,7 @@ public class TestWALRecordReader {
fs = TEST_UTIL.getDFSCluster().getFileSystem(); fs = TEST_UTIL.getDFSCluster().getFileSystem();
hbaseDir = TEST_UTIL.createRootDir(); hbaseDir = TEST_UTIL.createRootDir();
logDir = new Path(hbaseDir, HConstants.HREGION_LOGDIR_NAME); logDir = new Path(hbaseDir, HConstants.HREGION_LOGDIR_NAME);
htd = new HTableDescriptor(tableName); htd = new HTableDescriptor(tableName);
@ -152,7 +152,7 @@ public class TestWALRecordReader {
walfactory.shutdown(); walfactory.shutdown();
LOG.info("Closed WAL " + log.toString()); LOG.info("Closed WAL " + log.toString());
WALInputFormat input = new WALInputFormat(); WALInputFormat input = new WALInputFormat();
Configuration jobConf = new Configuration(conf); Configuration jobConf = new Configuration(conf);
jobConf.set("mapreduce.input.fileinputformat.inputdir", logDir.toString()); jobConf.set("mapreduce.input.fileinputformat.inputdir", logDir.toString());
@ -257,9 +257,14 @@ public class TestWALRecordReader {
for (byte[] column : columns) { for (byte[] column : columns) {
assertTrue(reader.nextKeyValue()); assertTrue(reader.nextKeyValue());
Cell cell = reader.getCurrentValue().getCells().get(0); Cell cell = reader.getCurrentValue().getCells().get(0);
if (!Bytes.equals(column, cell.getQualifier())) { if (!Bytes.equals(column, 0, column.length, cell.getQualifierArray(),
assertTrue("expected [" + Bytes.toString(column) + "], actual [" cell.getQualifierOffset(), cell.getQualifierLength())) {
+ Bytes.toString(cell.getQualifier()) + "]", false); assertTrue(
"expected ["
+ Bytes.toString(column)
+ "], actual ["
+ Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(),
cell.getQualifierLength()) + "]", false);
} }
} }
assertFalse(reader.nextKeyValue()); assertFalse(reader.nextKeyValue());

View File

@ -285,7 +285,7 @@ public class TestBulkLoad {
@Override @Override
protected boolean matchesSafely(WALEdit item) { protected boolean matchesSafely(WALEdit item) {
assertTrue(Arrays.equals(item.getCells().get(0).getQualifier(), typeBytes)); assertTrue(Arrays.equals(CellUtil.cloneQualifier(item.getCells().get(0)), typeBytes));
BulkLoadDescriptor desc; BulkLoadDescriptor desc;
try { try {
desc = WALEdit.getBulkLoadDescriptor(item.getCells().get(0)); desc = WALEdit.getBulkLoadDescriptor(item.getCells().get(0));

View File

@ -25,6 +25,7 @@ import junit.framework.TestCase;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.testclassification.SmallTests;
@ -49,15 +50,18 @@ public class TestCellSkipListSet extends TestCase {
assertEquals(1, this.csls.size()); assertEquals(1, this.csls.size());
Cell first = this.csls.first(); Cell first = this.csls.first();
assertTrue(kv.equals(first)); assertTrue(kv.equals(first));
assertTrue(Bytes.equals(kv.getValue(), first.getValue())); assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(),
first.getValueArray(), first.getValueOffset(), first.getValueLength()));
// Now try overwritting // Now try overwritting
byte [] overwriteValue = Bytes.toBytes("overwrite"); byte [] overwriteValue = Bytes.toBytes("overwrite");
KeyValue overwrite = new KeyValue(bytes, bytes, bytes, overwriteValue); KeyValue overwrite = new KeyValue(bytes, bytes, bytes, overwriteValue);
this.csls.add(overwrite); this.csls.add(overwrite);
assertEquals(1, this.csls.size()); assertEquals(1, this.csls.size());
first = this.csls.first(); first = this.csls.first();
assertTrue(Bytes.equals(overwrite.getValue(), first.getValue())); assertTrue(Bytes.equals(overwrite.getValueArray(), overwrite.getValueOffset(),
assertFalse(Bytes.equals(overwrite.getValue(), kv.getValue())); overwrite.getValueLength(), first.getValueArray(), first.getValueOffset(),
first.getValueLength()));
assertFalse(Bytes.equals(CellUtil.cloneValue(overwrite), CellUtil.cloneValue(kv)));
} }
public void testIterator() throws Exception { public void testIterator() throws Exception {
@ -71,8 +75,10 @@ public class TestCellSkipListSet extends TestCase {
// Assert that we added 'total' values and that they are in order // Assert that we added 'total' values and that they are in order
int count = 0; int count = 0;
for (Cell kv: this.csls) { for (Cell kv: this.csls) {
assertEquals("" + count, Bytes.toString(kv.getQualifier())); assertEquals("" + count,
assertTrue(Bytes.equals(kv.getValue(), value1)); Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value1,
0, value1.length));
count++; count++;
} }
assertEquals(total, count); assertEquals(total, count);
@ -83,9 +89,11 @@ public class TestCellSkipListSet extends TestCase {
// Assert that we added 'total' values and that they are in order and that // Assert that we added 'total' values and that they are in order and that
// we are getting back value2 // we are getting back value2
count = 0; count = 0;
for (Cell kv: this.csls) { for (Cell kv : this.csls) {
assertEquals("" + count, Bytes.toString(kv.getQualifier())); assertEquals("" + count,
assertTrue(Bytes.equals(kv.getValue(), value2)); Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value2,
0, value2.length));
count++; count++;
} }
assertEquals(total, count); assertEquals(total, count);
@ -103,8 +111,10 @@ public class TestCellSkipListSet extends TestCase {
int count = 0; int count = 0;
for (Iterator<Cell> i = this.csls.descendingIterator(); i.hasNext();) { for (Iterator<Cell> i = this.csls.descendingIterator(); i.hasNext();) {
Cell kv = i.next(); Cell kv = i.next();
assertEquals("" + (total - (count + 1)), Bytes.toString(kv.getQualifier())); assertEquals("" + (total - (count + 1)),
assertTrue(Bytes.equals(kv.getValue(), value1)); Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value1,
0, value1.length));
count++; count++;
} }
assertEquals(total, count); assertEquals(total, count);
@ -117,8 +127,10 @@ public class TestCellSkipListSet extends TestCase {
count = 0; count = 0;
for (Iterator<Cell> i = this.csls.descendingIterator(); i.hasNext();) { for (Iterator<Cell> i = this.csls.descendingIterator(); i.hasNext();) {
Cell kv = i.next(); Cell kv = i.next();
assertEquals("" + (total - (count + 1)), Bytes.toString(kv.getQualifier())); assertEquals("" + (total - (count + 1)),
assertTrue(Bytes.equals(kv.getValue(), value2)); Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
assertTrue(Bytes.equals(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength(), value2,
0, value2.length));
count++; count++;
} }
assertEquals(total, count); assertEquals(total, count);
@ -145,8 +157,10 @@ public class TestCellSkipListSet extends TestCase {
this.csls.add(new KeyValue(bytes, bytes, Bytes.toBytes("" + i), value2)); this.csls.add(new KeyValue(bytes, bytes, Bytes.toBytes("" + i), value2));
} }
tail = this.csls.tailSet(splitter); tail = this.csls.tailSet(splitter);
assertTrue(Bytes.equals(tail.first().getValue(), value2)); assertTrue(Bytes.equals(tail.first().getValueArray(), tail.first().getValueOffset(),
tail.first().getValueLength(), value2, 0, value2.length));
head = this.csls.headSet(splitter); head = this.csls.headSet(splitter);
assertTrue(Bytes.equals(head.first().getValue(), value2)); assertTrue(Bytes.equals(head.first().getValueArray(), head.first().getValueOffset(),
head.first().getValueLength(), value2, 0, value2.length));
} }
} }

View File

@ -207,8 +207,8 @@ public class TestCompoundBloomFilter {
// Test for false negatives (not allowed). // Test for false negatives (not allowed).
int numChecked = 0; int numChecked = 0;
for (KeyValue kv : kvs) { for (KeyValue kv : kvs) {
byte[] row = kv.getRow(); byte[] row = CellUtil.cloneRow(kv);
boolean present = isInBloom(scanner, row, kv.getQualifier()); boolean present = isInBloom(scanner, row, CellUtil.cloneQualifier(kv));
assertTrue(testIdMsg + " Bloom filter false negative on row " assertTrue(testIdMsg + " Bloom filter false negative on row "
+ Bytes.toStringBinary(row) + " after " + numChecked + Bytes.toStringBinary(row) + " after " + numChecked
+ " successful checks", present); + " successful checks", present);
@ -358,9 +358,10 @@ public class TestCompoundBloomFilter {
KeyValue rowKV = KeyValueUtil.createKeyValueFromKey(rowKey); KeyValue rowKV = KeyValueUtil.createKeyValueFromKey(rowKey);
KeyValue rowColKV = KeyValueUtil.createKeyValueFromKey(rowColKey); KeyValue rowColKV = KeyValueUtil.createKeyValueFromKey(rowColKey);
assertEquals(rowKV.getTimestamp(), rowColKV.getTimestamp()); assertEquals(rowKV.getTimestamp(), rowColKV.getTimestamp());
assertEquals(Bytes.toStringBinary(rowKV.getRow()), assertEquals(Bytes.toStringBinary(rowKV.getRowArray(), rowKV.getRowOffset(),
Bytes.toStringBinary(rowColKV.getRow())); rowKV.getRowLength()), Bytes.toStringBinary(rowColKV.getRowArray(), rowColKV.getRowOffset(),
assertEquals(0, rowKV.getQualifier().length); rowColKV.getRowLength()));
assertEquals(0, rowKV.getQualifierLength());
} }

View File

@ -70,7 +70,7 @@ public class TestDefaultMemStore extends TestCase {
private static final int QUALIFIER_COUNT = ROW_COUNT; private static final int QUALIFIER_COUNT = ROW_COUNT;
private static final byte [] FAMILY = Bytes.toBytes("column"); private static final byte [] FAMILY = Bytes.toBytes("column");
private MultiVersionConsistencyControl mvcc; private MultiVersionConsistencyControl mvcc;
private AtomicLong startSeqNum = new AtomicLong(0); private AtomicLong startSeqNum = new AtomicLong(0);
@Override @Override
public void setUp() throws Exception { public void setUp() throws Exception {
@ -88,7 +88,9 @@ public class TestDefaultMemStore extends TestCase {
this.memstore.add(samekey); this.memstore.add(samekey);
Cell found = this.memstore.cellSet.first(); Cell found = this.memstore.cellSet.first();
assertEquals(1, this.memstore.cellSet.size()); assertEquals(1, this.memstore.cellSet.size());
assertTrue(Bytes.toString(found.getValue()), CellUtil.matchingValue(samekey, found)); assertTrue(
Bytes.toString(found.getValueArray(), found.getValueOffset(), found.getValueLength()),
CellUtil.matchingValue(samekey, found));
} }
/** /**
@ -179,7 +181,7 @@ public class TestDefaultMemStore extends TestCase {
/** /**
* A simple test which verifies the 3 possible states when scanning across snapshot. * A simple test which verifies the 3 possible states when scanning across snapshot.
* @throws IOException * @throws IOException
* @throws CloneNotSupportedException * @throws CloneNotSupportedException
*/ */
public void testScanAcrossSnapshot2() throws IOException, CloneNotSupportedException { public void testScanAcrossSnapshot2() throws IOException, CloneNotSupportedException {
// we are going to the scanning across snapshot with two kvs // we are going to the scanning across snapshot with two kvs
@ -843,7 +845,7 @@ public class TestDefaultMemStore extends TestCase {
assert(newSize > oldSize); assert(newSize > oldSize);
//The kv1 should be removed. //The kv1 should be removed.
assert(memstore.cellSet.size() == 2); assert(memstore.cellSet.size() == 2);
KeyValue kv4 = KeyValueTestUtil.create("r", "f", "q", 104, "v"); KeyValue kv4 = KeyValueTestUtil.create("r", "f", "q", 104, "v");
kv4.setSequenceId(1); kv4.setSequenceId(1);
l.clear(); l.add(kv4); l.clear(); l.add(kv4);
@ -855,12 +857,12 @@ public class TestDefaultMemStore extends TestCase {
} }
//////////////////////////////////// ////////////////////////////////////
// Test for periodic memstore flushes // Test for periodic memstore flushes
// based on time of oldest edit // based on time of oldest edit
//////////////////////////////////// ////////////////////////////////////
/** /**
* Tests that the timeOfOldestEdit is updated correctly for the * Tests that the timeOfOldestEdit is updated correctly for the
* various edit operations in memstore. * various edit operations in memstore.
* @throws Exception * @throws Exception
*/ */
@ -876,7 +878,7 @@ public class TestDefaultMemStore extends TestCase {
memstore.add(KeyValueTestUtil.create("r", "f", "q", 100, "v")); memstore.add(KeyValueTestUtil.create("r", "f", "q", 100, "v"));
t = memstore.timeOfOldestEdit(); t = memstore.timeOfOldestEdit();
assertTrue(t == 1234); assertTrue(t == 1234);
// snapshot() will reset timeOfOldestEdit. The method will also assert the // snapshot() will reset timeOfOldestEdit. The method will also assert the
// value is reset to Long.MAX_VALUE // value is reset to Long.MAX_VALUE
t = runSnapshot(memstore); t = runSnapshot(memstore);
@ -903,7 +905,7 @@ public class TestDefaultMemStore extends TestCase {
* Tests the HRegion.shouldFlush method - adds an edit in the memstore * Tests the HRegion.shouldFlush method - adds an edit in the memstore
* and checks that shouldFlush returns true, and another where it disables * and checks that shouldFlush returns true, and another where it disables
* the periodic flush functionality and tests whether shouldFlush returns * the periodic flush functionality and tests whether shouldFlush returns
* false. * false.
* @throws Exception * @throws Exception
*/ */
public void testShouldFlush() throws Exception { public void testShouldFlush() throws Exception {
@ -973,7 +975,7 @@ public class TestDefaultMemStore extends TestCase {
long t = 1234; long t = 1234;
@Override @Override
public long currentTime() { public long currentTime() {
return t; return t;
} }
public void setCurrentTimeMillis(long t) { public void setCurrentTimeMillis(long t) {
this.t = t; this.t = t;

View File

@ -3973,8 +3973,8 @@ public class TestHRegion {
if (previousKV != null) { if (previousKV != null) {
if (Bytes.compareTo(CellUtil.cloneValue(previousKV), thisValue) != 0) { if (Bytes.compareTo(CellUtil.cloneValue(previousKV), thisValue) != 0) {
LOG.warn("These two KV should have the same value." + " Previous KV:" + previousKV LOG.warn("These two KV should have the same value." + " Previous KV:" + previousKV
+ "(memStoreTS:" + previousKV.getMvccVersion() + ")" + ", New KV: " + kv + "(memStoreTS:" + previousKV.getSequenceId() + ")" + ", New KV: " + kv
+ "(memStoreTS:" + kv.getMvccVersion() + ")"); + "(memStoreTS:" + kv.getSequenceId() + ")");
assertEquals(0, Bytes.compareTo(CellUtil.cloneValue(previousKV), thisValue)); assertEquals(0, Bytes.compareTo(CellUtil.cloneValue(previousKV), thisValue));
} }
} }
@ -5132,17 +5132,20 @@ public class TestHRegion {
List<Cell> currRow = new ArrayList<Cell>(); List<Cell> currRow = new ArrayList<Cell>();
boolean hasNext = scanner.next(currRow); boolean hasNext = scanner.next(currRow);
assertEquals(2, currRow.size()); assertEquals(2, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowC, 0, rowC.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowB, 0, rowB.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowA)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowA, 0, rowA.length));
assertFalse(hasNext); assertFalse(hasNext);
scanner.close(); scanner.close();
} finally { } finally {
@ -5189,17 +5192,20 @@ public class TestHRegion {
InternalScanner scanner = region.getScanner(scan); InternalScanner scanner = region.getScanner(scan);
boolean hasNext = scanner.next(currRow); boolean hasNext = scanner.next(currRow);
assertEquals(2, currRow.size()); assertEquals(2, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowC, 0, rowC.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowB, 0, rowB.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowA)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowA, 0, rowA.length));
assertFalse(hasNext); assertFalse(hasNext);
scanner.close(); scanner.close();
} finally { } finally {
@ -5243,17 +5249,20 @@ public class TestHRegion {
InternalScanner scanner = region.getScanner(scan); InternalScanner scanner = region.getScanner(scan);
boolean hasNext = scanner.next(currRow); boolean hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowC, 0, rowC.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowB, 0, rowB.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowA)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowA, 0, rowA.length));
assertFalse(hasNext); assertFalse(hasNext);
scanner.close(); scanner.close();
} finally { } finally {
@ -5311,17 +5320,20 @@ public class TestHRegion {
InternalScanner scanner = region.getScanner(scan); InternalScanner scanner = region.getScanner(scan);
boolean hasNext = scanner.next(currRow); boolean hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowD, 0, rowD.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowC, 0, rowC.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowB, 0, rowB.length));
assertFalse(hasNext); assertFalse(hasNext);
scanner.close(); scanner.close();
@ -5332,7 +5344,8 @@ public class TestHRegion {
scanner = region.getScanner(scan); scanner = region.getScanner(scan);
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowD, 0, rowD.length));
scanner.close(); scanner.close();
} finally { } finally {
HBaseTestingUtility.closeRegionAndWAL(this.region); HBaseTestingUtility.closeRegionAndWAL(this.region);
@ -5391,17 +5404,20 @@ public class TestHRegion {
InternalScanner scanner = region.getScanner(scan); InternalScanner scanner = region.getScanner(scan);
boolean hasNext = scanner.next(currRow); boolean hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowD, 0, rowD.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowC)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowC, 0, rowC.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowB)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowB, 0, rowB.length));
assertFalse(hasNext); assertFalse(hasNext);
scanner.close(); scanner.close();
@ -5412,7 +5428,8 @@ public class TestHRegion {
scanner = region.getScanner(scan); scanner = region.getScanner(scan);
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), rowD)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowD, 0, rowD.length));
scanner.close(); scanner.close();
} finally { } finally {
HBaseTestingUtility.closeRegionAndWAL(this.region); HBaseTestingUtility.closeRegionAndWAL(this.region);
@ -5536,42 +5553,49 @@ public class TestHRegion {
// "row4" takes 2 next() calls since batch=3 // "row4" takes 2 next() calls since batch=3
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(3, currRow.size()); assertEquals(3, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), row4)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row4, 0, row4.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(2, currRow.size()); assertEquals(2, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), row4)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow.get(0).getRowLength(), row4, 0,
row4.length));
assertTrue(hasNext); assertTrue(hasNext);
// 2. scan out "row3" (2 kv) // 2. scan out "row3" (2 kv)
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(2, currRow.size()); assertEquals(2, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), row3)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row3, 0, row3.length));
assertTrue(hasNext); assertTrue(hasNext);
// 3. scan out "row2" (4 kvs) // 3. scan out "row2" (4 kvs)
// "row2" takes 2 next() calls since batch=3 // "row2" takes 2 next() calls since batch=3
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(3, currRow.size()); assertEquals(3, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), row2)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row2, 0, row2.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), row2)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row2, 0, row2.length));
assertTrue(hasNext); assertTrue(hasNext);
// 4. scan out "row1" (2 kv) // 4. scan out "row1" (2 kv)
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(2, currRow.size()); assertEquals(2, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), row1)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row1, 0, row1.length));
assertTrue(hasNext); assertTrue(hasNext);
// 5. scan out "row0" (1 kv) // 5. scan out "row0" (1 kv)
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), row0)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row0, 0, row0.length));
assertFalse(hasNext); assertFalse(hasNext);
scanner.close(); scanner.close();
@ -5632,22 +5656,26 @@ public class TestHRegion {
List<Cell> currRow = new ArrayList<Cell>(); List<Cell> currRow = new ArrayList<Cell>();
boolean hasNext = scanner.next(currRow); boolean hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), row4)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row4, 0, row4.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), row3)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row3, 0, row3.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), row2)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row2, 0, row2.length));
assertTrue(hasNext); assertTrue(hasNext);
currRow.clear(); currRow.clear();
hasNext = scanner.next(currRow); hasNext = scanner.next(currRow);
assertEquals(1, currRow.size()); assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRow(), row1)); assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row1, 0, row1.length));
assertFalse(hasNext); assertFalse(hasNext);
} finally { } finally {
HBaseTestingUtility.closeRegionAndWAL(this.region); HBaseTestingUtility.closeRegionAndWAL(this.region);
@ -5699,7 +5727,8 @@ public class TestHRegion {
int verify = startRow + 2 * numRows - 1; int verify = startRow + 2 * numRows - 1;
do { do {
more = scanner.next(currRow); more = scanner.next(currRow);
assertEquals(Bytes.toString(currRow.get(0).getRow()), verify + ""); assertEquals(Bytes.toString(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(),
currRow.get(0).getRowLength()), verify + "");
verify--; verify--;
currRow.clear(); currRow.clear();
} while(more); } while(more);
@ -5712,7 +5741,8 @@ public class TestHRegion {
verify = startRow + 2 * numRows - 1; verify = startRow + 2 * numRows - 1;
do { do {
more = scanner.next(currRow); more = scanner.next(currRow);
assertEquals(Bytes.toString(currRow.get(0).getRow()), verify + ""); assertEquals(Bytes.toString(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(),
currRow.get(0).getRowLength()), verify + "");
verify--; verify--;
currRow.clear(); currRow.clear();
} while(more); } while(more);
@ -5725,7 +5755,8 @@ public class TestHRegion {
verify = startRow + numRows - 1; verify = startRow + numRows - 1;
do { do {
more = scanner.next(currRow); more = scanner.next(currRow);
assertEquals(Bytes.toString(currRow.get(0).getRow()), verify + ""); assertEquals(Bytes.toString(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(),
currRow.get(0).getRowLength()), verify + "");
verify--; verify--;
currRow.clear(); currRow.clear();
} while(more); } while(more);
@ -5738,7 +5769,8 @@ public class TestHRegion {
verify = startRow + numRows - 1; verify = startRow + numRows - 1;
do { do {
more = scanner.next(currRow); more = scanner.next(currRow);
assertEquals(Bytes.toString(currRow.get(0).getRow()), verify + ""); assertEquals(Bytes.toString(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(),
currRow.get(0).getRowLength()), verify + "");
verify--; verify--;
currRow.clear(); currRow.clear();
} while(more); } while(more);

View File

@ -279,7 +279,7 @@ public class TestHRegionReplayEvents {
if (WALEdit.isMetaEditFamily(entry.getEdit().getCells().get(0))) { if (WALEdit.isMetaEditFamily(entry.getEdit().getCells().get(0))) {
return 0; // handled elsewhere return 0; // handled elsewhere
} }
Put put = new Put(entry.getEdit().getCells().get(0).getRow()); Put put = new Put(CellUtil.cloneRow(entry.getEdit().getCells().get(0)));
for (Cell cell : entry.getEdit().getCells()) put.add(cell); for (Cell cell : entry.getEdit().getCells()) put.add(cell);
put.setDurability(Durability.SKIP_WAL); put.setDurability(Durability.SKIP_WAL);
MutationReplay mutation = new MutationReplay(MutationType.PUT, put, 0, 0); MutationReplay mutation = new MutationReplay(MutationType.PUT, put, 0, 0);

View File

@ -76,7 +76,7 @@ public class TestMajorCompaction {
private static final Log LOG = LogFactory.getLog(TestMajorCompaction.class.getName()); private static final Log LOG = LogFactory.getLog(TestMajorCompaction.class.getName());
private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU(); private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU();
protected Configuration conf = UTIL.getConfiguration(); protected Configuration conf = UTIL.getConfiguration();
private Region r = null; private Region r = null;
private HTableDescriptor htd = null; private HTableDescriptor htd = null;
private static final byte [] COLUMN_FAMILY = fam1; private static final byte [] COLUMN_FAMILY = fam1;
@ -355,7 +355,7 @@ public class TestMajorCompaction {
HFileScanner scanner = f.getReader().getScanner(false, false); HFileScanner scanner = f.getReader().getScanner(false, false);
scanner.seekTo(); scanner.seekTo();
do { do {
byte [] row = scanner.getCell().getRow(); byte [] row = CellUtil.cloneRow(scanner.getCell());
if (Bytes.equals(row, STARTROW)) { if (Bytes.equals(row, STARTROW)) {
count1++; count1++;
} else if(Bytes.equals(row, secondRowBytes)) { } else if(Bytes.equals(row, secondRowBytes)) {
@ -434,7 +434,7 @@ public class TestMajorCompaction {
assertNotNull("Expected to receive a compaction request", request); assertNotNull("Expected to receive a compaction request", request);
assertEquals( assertEquals(
"User-requested major compaction should always occur, even if there are too many store files", "User-requested major compaction should always occur, even if there are too many store files",
true, true,
request.isMajor()); request.isMajor());
} }
@ -457,7 +457,7 @@ public class TestMajorCompaction {
List<Cell> results = new ArrayList<Cell>(); List<Cell> results = new ArrayList<Cell>();
boolean result = s.next(results); boolean result = s.next(results);
assertTrue(!results.isEmpty()); assertTrue(!results.isEmpty());
r.delete(new Delete(results.get(0).getRow())); r.delete(new Delete(CellUtil.cloneRow(results.get(0))));
if (!result) break; if (!result) break;
} while (true); } while (true);
s.close(); s.close();

View File

@ -21,38 +21,38 @@ import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles; import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import junit.framework.Assert;
@Category({RegionServerTests.class, MediumTests.class}) @Category({RegionServerTests.class, MediumTests.class})
public class TestScannerWithBulkload { public class TestScannerWithBulkload {
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@ -100,11 +100,16 @@ public class TestScannerWithBulkload {
while (result != null) { while (result != null) {
List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q")); List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q"));
for (Cell _c : cells) { for (Cell _c : cells) {
if (Bytes.toString(_c.getRow()).equals("row1")) { if (Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength())
System.out.println(Bytes.toString(_c.getRow())); .equals("row1")) {
System.out.println(Bytes.toString(_c.getQualifier())); System.out
System.out.println(Bytes.toString(_c.getValue())); .println(Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength()));
Assert.assertEquals("version3", Bytes.toString(_c.getValue())); System.out.println(Bytes.toString(_c.getQualifierArray(), _c.getQualifierOffset(),
_c.getQualifierLength()));
System.out.println(
Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
Assert.assertEquals("version3",
Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
} }
} }
result = scanner.next(); result = scanner.next();
@ -118,11 +123,16 @@ public class TestScannerWithBulkload {
while (result != null) { while (result != null) {
List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q")); List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q"));
for (Cell _c : cells) { for (Cell _c : cells) {
if (Bytes.toString(_c.getRow()).equals("row1")) { if (Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength())
System.out.println(Bytes.toString(_c.getRow())); .equals("row1")) {
System.out.println(Bytes.toString(_c.getQualifier())); System.out
System.out.println(Bytes.toString(_c.getValue())); .println(Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength()));
Assert.assertEquals(expctedVal, Bytes.toString(_c.getValue())); System.out.println(Bytes.toString(_c.getQualifierArray(), _c.getQualifierOffset(),
_c.getQualifierLength()));
System.out.println(
Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
Assert.assertEquals(expctedVal,
Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
} }
} }
result = scanner.next(); result = scanner.next();
@ -191,7 +201,9 @@ public class TestScannerWithBulkload {
Result result = scanner.next(); Result result = scanner.next();
List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q")); List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q"));
Assert.assertEquals(1, cells.size()); Assert.assertEquals(1, cells.size());
Assert.assertEquals("version1", Bytes.toString(cells.get(0).getValue())); Cell _c = cells.get(0);
Assert.assertEquals("version1",
Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
scanner.close(); scanner.close();
return table; return table;
} }
@ -270,11 +282,16 @@ public class TestScannerWithBulkload {
while (result != null) { while (result != null) {
List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q")); List<Cell> cells = result.getColumnCells(Bytes.toBytes("col"), Bytes.toBytes("q"));
for (Cell _c : cells) { for (Cell _c : cells) {
if (Bytes.toString(_c.getRow()).equals("row1")) { if (Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength())
System.out.println(Bytes.toString(_c.getRow())); .equals("row1")) {
System.out.println(Bytes.toString(_c.getQualifier())); System.out
System.out.println(Bytes.toString(_c.getValue())); .println(Bytes.toString(_c.getRowArray(), _c.getRowOffset(), _c.getRowLength()));
Assert.assertEquals("version3", Bytes.toString(_c.getValue())); System.out.println(Bytes.toString(_c.getQualifierArray(), _c.getQualifierOffset(),
_c.getQualifierLength()));
System.out.println(
Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
Assert.assertEquals("version3",
Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength()));
} }
} }
result = scanner.next(); result = scanner.next();

View File

@ -34,14 +34,13 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.io.HFileLink;
@ -54,6 +53,8 @@ import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder; import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl; import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
import org.apache.hadoop.hbase.io.hfile.HFileScanner; import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.BloomFilterFactory; import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.ChecksumType;
@ -171,9 +172,9 @@ public class TestStoreFile extends HBaseTestCase {
// may be in middle of row. Create new one with empty column and // may be in middle of row. Create new one with empty column and
// timestamp. // timestamp.
Cell kv = reader.midkey(); Cell kv = reader.midkey();
byte [] midRow = kv.getRow(); byte [] midRow = CellUtil.cloneRow(kv);
kv = reader.getLastKey(); kv = reader.getLastKey();
byte [] finalRow = kv.getRow(); byte [] finalRow = CellUtil.cloneRow(kv);
// Make a reference // Make a reference
HRegionInfo splitHri = new HRegionInfo(hri.getTable(), null, midRow); HRegionInfo splitHri = new HRegionInfo(hri.getTable(), null, midRow);
Path refPath = splitStoreFile(regionFs, splitHri, TEST_FAMILY, hsf, midRow, true); Path refPath = splitStoreFile(regionFs, splitHri, TEST_FAMILY, hsf, midRow, true);
@ -186,11 +187,13 @@ public class TestStoreFile extends HBaseTestCase {
ByteBuffer bb = ByteBuffer.wrap(((KeyValue) s.getKey()).getKey()); ByteBuffer bb = ByteBuffer.wrap(((KeyValue) s.getKey()).getKey());
kv = KeyValueUtil.createKeyValueFromKey(bb); kv = KeyValueUtil.createKeyValueFromKey(bb);
if (first) { if (first) {
assertTrue(Bytes.equals(kv.getRow(), midRow)); assertTrue(Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), midRow, 0,
midRow.length));
first = false; first = false;
} }
} }
assertTrue(Bytes.equals(kv.getRow(), finalRow)); assertTrue(Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), finalRow, 0,
finalRow.length));
} }
@Test @Test
@ -301,7 +304,7 @@ public class TestStoreFile extends HBaseTestCase {
// Now confirm that I can read from the ref to link // Now confirm that I can read from the ref to link
HFileScanner sB = hsfB.createReader().getScanner(false, false); HFileScanner sB = hsfB.createReader().getScanner(false, false);
sB.seekTo(); sB.seekTo();
//count++ as seekTo() will advance the scanner //count++ as seekTo() will advance the scanner
count++; count++;
while (sB.next()) { while (sB.next()) {
@ -316,7 +319,7 @@ public class TestStoreFile extends HBaseTestCase {
throws IOException { throws IOException {
Cell midkey = f.createReader().midkey(); Cell midkey = f.createReader().midkey();
KeyValue midKV = (KeyValue)midkey; KeyValue midKV = (KeyValue)midkey;
byte [] midRow = midKV.getRow(); byte [] midRow = CellUtil.cloneRow(midKV);
// Create top split. // Create top split.
HRegionInfo topHri = new HRegionInfo(regionFs.getRegionInfo().getTable(), HRegionInfo topHri = new HRegionInfo(regionFs.getRegionInfo().getTable(),
null, midRow); null, midRow);
@ -384,9 +387,9 @@ public class TestStoreFile extends HBaseTestCase {
assertTrue(fs.exists(f.getPath())); assertTrue(fs.exists(f.getPath()));
topPath = splitStoreFile(regionFs, topHri, TEST_FAMILY, f, badmidkey, true); topPath = splitStoreFile(regionFs, topHri, TEST_FAMILY, f, badmidkey, true);
bottomPath = splitStoreFile(regionFs, bottomHri, TEST_FAMILY, f, badmidkey, false); bottomPath = splitStoreFile(regionFs, bottomHri, TEST_FAMILY, f, badmidkey, false);
assertNull(bottomPath); assertNull(bottomPath);
top = new StoreFile(this.fs, topPath, conf, cacheConf, BloomType.NONE).createReader(); top = new StoreFile(this.fs, topPath, conf, cacheConf, BloomType.NONE).createReader();
// Now read from the top. // Now read from the top.
first = true; first = true;
@ -402,7 +405,8 @@ public class TestStoreFile extends HBaseTestCase {
first = false; first = false;
KeyValue keyKV = KeyValueUtil.createKeyValueFromKey(key); KeyValue keyKV = KeyValueUtil.createKeyValueFromKey(key);
LOG.info("First top when key < bottom: " + keyKV); LOG.info("First top when key < bottom: " + keyKV);
String tmp = Bytes.toString(keyKV.getRow()); String tmp =
Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength());
for (int i = 0; i < tmp.length(); i++) { for (int i = 0; i < tmp.length(); i++) {
assertTrue(tmp.charAt(i) == 'a'); assertTrue(tmp.charAt(i) == 'a');
} }
@ -410,7 +414,7 @@ public class TestStoreFile extends HBaseTestCase {
} }
KeyValue keyKV = KeyValueUtil.createKeyValueFromKey(key); KeyValue keyKV = KeyValueUtil.createKeyValueFromKey(key);
LOG.info("Last top when key < bottom: " + keyKV); LOG.info("Last top when key < bottom: " + keyKV);
String tmp = Bytes.toString(keyKV.getRow()); String tmp = Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength());
for (int i = 0; i < tmp.length(); i++) { for (int i = 0; i < tmp.length(); i++) {
assertTrue(tmp.charAt(i) == 'z'); assertTrue(tmp.charAt(i) == 'z');
} }
@ -434,7 +438,7 @@ public class TestStoreFile extends HBaseTestCase {
first = false; first = false;
keyKV = KeyValueUtil.createKeyValueFromKey(key); keyKV = KeyValueUtil.createKeyValueFromKey(key);
LOG.info("First bottom when key > top: " + keyKV); LOG.info("First bottom when key > top: " + keyKV);
tmp = Bytes.toString(keyKV.getRow()); tmp = Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength());
for (int i = 0; i < tmp.length(); i++) { for (int i = 0; i < tmp.length(); i++) {
assertTrue(tmp.charAt(i) == 'a'); assertTrue(tmp.charAt(i) == 'a');
} }
@ -443,7 +447,8 @@ public class TestStoreFile extends HBaseTestCase {
keyKV = KeyValueUtil.createKeyValueFromKey(key); keyKV = KeyValueUtil.createKeyValueFromKey(key);
LOG.info("Last bottom when key > top: " + keyKV); LOG.info("Last bottom when key > top: " + keyKV);
for (int i = 0; i < tmp.length(); i++) { for (int i = 0; i < tmp.length(); i++) {
assertTrue(Bytes.toString(keyKV.getRow()).charAt(i) == 'z'); assertTrue(Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength())
.charAt(i) == 'z');
} }
} finally { } finally {
if (top != null) { if (top != null) {
@ -500,7 +505,7 @@ public class TestStoreFile extends HBaseTestCase {
+ ", expected no more than " + maxFalsePos + ")", + ", expected no more than " + maxFalsePos + ")",
falsePos <= maxFalsePos); falsePos <= maxFalsePos);
} }
private static final int BLOCKSIZE_SMALL = 8192; private static final int BLOCKSIZE_SMALL = 8192;
@Test @Test
@ -909,7 +914,7 @@ public class TestStoreFile extends HBaseTestCase {
KeyValue keyv1 = KeyValueUtil.ensureKeyValue(kv1); KeyValue keyv1 = KeyValueUtil.ensureKeyValue(kv1);
KeyValue keyv2 = KeyValueUtil.ensureKeyValue(kv2); KeyValue keyv2 = KeyValueUtil.ensureKeyValue(kv2);
assertTrue(Bytes.compareTo( assertTrue(Bytes.compareTo(
keyv1.getBuffer(), keyv1.getKeyOffset(), keyv1.getKeyLength(), keyv1.getBuffer(), keyv1.getKeyOffset(), keyv1.getKeyLength(),
keyv2.getBuffer(), keyv2.getKeyOffset(), keyv2.getKeyLength()) == 0); keyv2.getBuffer(), keyv2.getKeyOffset(), keyv2.getKeyLength()) == 0);
assertTrue(Bytes.compareTo( assertTrue(Bytes.compareTo(
kv1.getValueArray(), kv1.getValueOffset(), kv1.getValueLength(), kv1.getValueArray(), kv1.getValueOffset(), kv1.getValueLength(),

View File

@ -43,6 +43,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
@ -175,7 +176,7 @@ public class TestStripeCompactor {
List<byte[]> boundaries = new ArrayList<byte[]>(); List<byte[]> boundaries = new ArrayList<byte[]>();
boundaries.add(left); boundaries.add(left);
for (int i = 1; i < output.length; ++i) { for (int i = 1; i < output.length; ++i) {
boundaries.add(output[i][0].getRow()); boundaries.add(CellUtil.cloneRow(output[i][0]));
} }
boundaries.add(right); boundaries.add(right);
writers.verifyBoundaries(boundaries.toArray(new byte[][] {})); writers.verifyBoundaries(boundaries.toArray(new byte[][] {}));

View File

@ -34,14 +34,11 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
@ -56,6 +53,8 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After; import org.junit.After;
import org.junit.AfterClass; import org.junit.AfterClass;
@ -568,15 +567,16 @@ public class TestTags {
for (Cell cell : edits) { for (Cell cell : edits) {
KeyValue kv = KeyValueUtil.ensureKeyValue(cell); KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
if (cf == null) { if (cf == null) {
cf = kv.getFamily(); cf = CellUtil.cloneFamily(kv);
} }
Tag tag = new Tag((byte) 1, attribute); Tag tag = new Tag((byte) 1, attribute);
List<Tag> tagList = new ArrayList<Tag>(); List<Tag> tagList = new ArrayList<Tag>();
tagList.add(tag); tagList.add(tag);
KeyValue newKV = new KeyValue(kv.getRow(), 0, kv.getRowLength(), kv.getFamily(), 0, KeyValue newKV = new KeyValue(CellUtil.cloneRow(kv), 0, kv.getRowLength(),
kv.getFamilyLength(), kv.getQualifier(), 0, kv.getQualifierLength(), CellUtil.cloneFamily(kv), 0, kv.getFamilyLength(), CellUtil.cloneQualifier(kv), 0,
kv.getTimestamp(), KeyValue.Type.codeToType(kv.getType()), kv.getValue(), 0, kv.getQualifierLength(), kv.getTimestamp(),
KeyValue.Type.codeToType(kv.getTypeByte()), CellUtil.cloneValue(kv), 0,
kv.getValueLength(), tagList); kv.getValueLength(), tagList);
((List<Cell>) updatedCells).add(newKV); ((List<Cell>) updatedCells).add(newKV);
} }

View File

@ -510,7 +510,8 @@ public class TestLogRolling {
while ((entry = reader.next()) != null) { while ((entry = reader.next()) != null) {
LOG.debug("#"+entry.getKey().getLogSeqNum()+": "+entry.getEdit().getCells()); LOG.debug("#"+entry.getKey().getLogSeqNum()+": "+entry.getEdit().getCells());
for (Cell cell : entry.getEdit().getCells()) { for (Cell cell : entry.getEdit().getCells()) {
loggedRows.add(Bytes.toStringBinary(cell.getRow())); loggedRows.add(Bytes.toStringBinary(cell.getRowArray(), cell.getRowOffset(),
cell.getRowLength()));
} }
} }
} catch (EOFException e) { } catch (EOFException e) {

View File

@ -26,12 +26,11 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException; import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
@ -62,8 +61,6 @@ import org.junit.rules.TestName;
*/ */
@Category({RegionServerTests.class, MediumTests.class}) @Category({RegionServerTests.class, MediumTests.class})
public class TestProtobufLog { public class TestProtobufLog {
private static final Log LOG = LogFactory.getLog(TestProtobufLog.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
protected FileSystem fs; protected FileSystem fs;
@ -189,9 +186,10 @@ public class TestProtobufLog {
assertEquals(tableName, entry.getKey().getTablename()); assertEquals(tableName, entry.getKey().getTablename());
int idx = 0; int idx = 0;
for (Cell val : entry.getEdit().getCells()) { for (Cell val : entry.getEdit().getCells()) {
assertTrue(Bytes.equals(row, val.getRow())); assertTrue(Bytes.equals(row, 0, row.length, val.getRowArray(), val.getRowOffset(),
val.getRowLength()));
String value = i + "" + idx; String value = i + "" + idx;
assertArrayEquals(Bytes.toBytes(value), val.getValue()); assertArrayEquals(Bytes.toBytes(value), CellUtil.cloneValue(val));
idx++; idx++;
} }
} }

View File

@ -29,6 +29,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
@ -43,8 +44,6 @@ import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.Table;
@ -208,15 +207,16 @@ public class TestReplicationWithTags {
for (Cell cell : edits) { for (Cell cell : edits) {
KeyValue kv = KeyValueUtil.ensureKeyValue(cell); KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
if (cf == null) { if (cf == null) {
cf = kv.getFamily(); cf = CellUtil.cloneFamily(kv);
} }
Tag tag = new Tag(TAG_TYPE, attribute); Tag tag = new Tag(TAG_TYPE, attribute);
List<Tag> tagList = new ArrayList<Tag>(); List<Tag> tagList = new ArrayList<Tag>();
tagList.add(tag); tagList.add(tag);
KeyValue newKV = new KeyValue(kv.getRow(), 0, kv.getRowLength(), kv.getFamily(), 0, KeyValue newKV = new KeyValue(CellUtil.cloneRow(kv), 0, kv.getRowLength(),
kv.getFamilyLength(), kv.getQualifier(), 0, kv.getQualifierLength(), CellUtil.cloneFamily(kv), 0, kv.getFamilyLength(), CellUtil.cloneQualifier(kv), 0,
kv.getTimestamp(), KeyValue.Type.codeToType(kv.getType()), kv.getValue(), 0, kv.getQualifierLength(), kv.getTimestamp(),
KeyValue.Type.codeToType(kv.getTypeByte()), CellUtil.cloneValue(kv), 0,
kv.getValueLength(), tagList); kv.getValueLength(), tagList);
((List<Cell>) updatedCells).add(newKV); ((List<Cell>) updatedCells).add(newKV);
} }

View File

@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.replication.regionserver;
import static org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster.closeRegion; import static org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster.closeRegion;
import static org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster.openRegion; import static org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster.openRegion;
import static org.junit.Assert.*; import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
@ -28,9 +28,9 @@ import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
@ -39,7 +39,6 @@ import org.apache.hadoop.hbase.RegionLocations;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ClusterConnection; import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.RegionLocator; import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory; import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory;
import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.Table;
@ -49,12 +48,9 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.WALCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.WALCoprocessorEnvironment;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory; import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster; import org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint; import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint.ReplicateContext; import org.apache.hadoop.hbase.replication.ReplicationEndpoint.ReplicateContext;
@ -65,6 +61,8 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.ReplicationTests; import org.apache.hadoop.hbase.testclassification.ReplicationTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil; import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WALKey;
import org.junit.After; import org.junit.After;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.Assert; import org.junit.Assert;
@ -82,9 +80,6 @@ import com.google.common.collect.Lists;
@Category({ReplicationTests.class, MediumTests.class}) @Category({ReplicationTests.class, MediumTests.class})
public class TestRegionReplicaReplicationEndpointNoMaster { public class TestRegionReplicaReplicationEndpointNoMaster {
private static final Log LOG = LogFactory.getLog(
TestRegionReplicaReplicationEndpointNoMaster.class);
private static final int NB_SERVERS = 2; private static final int NB_SERVERS = 2;
private static TableName tableName = TableName.valueOf( private static TableName tableName = TableName.valueOf(
TestRegionReplicaReplicationEndpointNoMaster.class.getSimpleName()); TestRegionReplicaReplicationEndpointNoMaster.class.getSimpleName());
@ -193,7 +188,7 @@ public class TestRegionReplicaReplicationEndpointNoMaster {
throws IOException, RuntimeException { throws IOException, RuntimeException {
Entry entry; Entry entry;
while ((entry = entries.poll()) != null) { while ((entry = entries.poll()) != null) {
byte[] row = entry.getEdit().getCells().get(0).getRow(); byte[] row = CellUtil.cloneRow(entry.getEdit().getCells().get(0));
RegionLocations locations = connection.locateRegion(tableName, row, true, true); RegionLocations locations = connection.locateRegion(tableName, row, true, true);
RegionReplicaReplayCallable callable = new RegionReplicaReplayCallable(connection, RegionReplicaReplayCallable callable = new RegionReplicaReplayCallable(connection,
RpcControllerFactory.instantiate(connection.getConfiguration()), RpcControllerFactory.instantiate(connection.getConfiguration()),
@ -298,7 +293,9 @@ public class TestRegionReplicaReplicationEndpointNoMaster {
Assert.assertEquals(1000, entries.size()); Assert.assertEquals(1000, entries.size());
for (Entry e: entries) { for (Entry e: entries) {
if (Integer.parseInt(Bytes.toString(e.getEdit().getCells().get(0).getValue())) % 2 == 0) { Cell _c = e.getEdit().getCells().get(0);
if (Integer.parseInt(
Bytes.toString(_c.getValueArray(), _c.getValueOffset(), _c.getValueLength())) % 2 == 0) {
e.getKey().setOrigLogSeqNum(1); // simulate dist log replay by setting orig seq id e.getKey().setOrigLogSeqNum(1); // simulate dist log replay by setting orig seq id
} }
} }

View File

@ -137,20 +137,26 @@ public class TestDefaultScanLabelGeneratorStack {
Cell current = cellScanner.current(); Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), ROW_1, 0, ROW_1.length)); current.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current.getQualifier(), Q1)); assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
assertTrue(Bytes.equals(current.getValue(), value1)); current.getQualifierLength(), Q1, 0, Q1.length));
assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
current.getValueLength(), value1, 0, value1.length));
cellScanner.advance(); cellScanner.advance();
current = cellScanner.current(); current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), ROW_1, 0, ROW_1.length)); current.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current.getQualifier(), Q2)); assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
assertTrue(Bytes.equals(current.getValue(), value2)); current.getQualifierLength(), Q2, 0, Q2.length));
assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
current.getValueLength(), value2, 0, value2.length));
cellScanner.advance(); cellScanner.advance();
current = cellScanner.current(); current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), ROW_1, 0, ROW_1.length)); current.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current.getQualifier(), Q3)); assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
assertTrue(Bytes.equals(current.getValue(), value3)); current.getQualifierLength(), Q3, 0, Q3.length));
assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
current.getValueLength(), value3, 0, value3.length));
return null; return null;
} }
@ -173,15 +179,19 @@ public class TestDefaultScanLabelGeneratorStack {
// test user can see value2 (CONFIDENTIAL) and value3 (no label) // test user can see value2 (CONFIDENTIAL) and value3 (no label)
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), ROW_1, 0, ROW_1.length)); current.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current.getQualifier(), Q2)); assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
assertTrue(Bytes.equals(current.getValue(), value2)); current.getQualifierLength(), Q2, 0, Q2.length));
assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
current.getValueLength(), value2, 0, value2.length));
cellScanner.advance(); cellScanner.advance();
current = cellScanner.current(); current = cellScanner.current();
// test user can see value2 (CONFIDENTIAL) and value3 (no label) // test user can see value2 (CONFIDENTIAL) and value3 (no label)
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), ROW_1, 0, ROW_1.length)); current.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current.getQualifier(), Q3)); assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
assertTrue(Bytes.equals(current.getValue(), value3)); current.getQualifierLength(), Q3, 0, Q3.length));
assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
current.getValueLength(), value3, 0, value3.length));
// Test scan with correct auth attribute for test user // Test scan with correct auth attribute for test user
Scan s1 = new Scan(); Scan s1 = new Scan();
@ -198,15 +208,19 @@ public class TestDefaultScanLabelGeneratorStack {
// test user can see value2 (CONFIDENTIAL) and value3 (no label) // test user can see value2 (CONFIDENTIAL) and value3 (no label)
assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(), assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
current1.getRowLength(), ROW_1, 0, ROW_1.length)); current1.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current1.getQualifier(), Q2)); assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
assertTrue(Bytes.equals(current1.getValue(), value2)); current1.getQualifierLength(), Q2, 0, Q2.length));
assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
current1.getValueLength(), value2, 0, value2.length));
cellScanner1.advance(); cellScanner1.advance();
current1 = cellScanner1.current(); current1 = cellScanner1.current();
// test user can see value2 (CONFIDENTIAL) and value3 (no label) // test user can see value2 (CONFIDENTIAL) and value3 (no label)
assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(), assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
current1.getRowLength(), ROW_1, 0, ROW_1.length)); current1.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current1.getQualifier(), Q3)); assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
assertTrue(Bytes.equals(current1.getValue(), value3)); current1.getQualifierLength(), Q3, 0, Q3.length));
assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
current1.getValueLength(), value3, 0, value3.length));
// Test scan with incorrect auth attribute for test user // Test scan with incorrect auth attribute for test user
Scan s2 = new Scan(); Scan s2 = new Scan();
@ -221,8 +235,10 @@ public class TestDefaultScanLabelGeneratorStack {
// This scan will only see value3 (no label) // This scan will only see value3 (no label)
assertTrue(Bytes.equals(current2.getRowArray(), current2.getRowOffset(), assertTrue(Bytes.equals(current2.getRowArray(), current2.getRowOffset(),
current2.getRowLength(), ROW_1, 0, ROW_1.length)); current2.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current2.getQualifier(), Q3)); assertTrue(Bytes.equals(current2.getQualifierArray(), current2.getQualifierOffset(),
assertTrue(Bytes.equals(current2.getValue(), value3)); current2.getQualifierLength(), Q3, 0, Q3.length));
assertTrue(Bytes.equals(current2.getValueArray(), current2.getValueOffset(),
current2.getValueLength(), value3, 0, value3.length));
assertFalse(cellScanner2.advance()); assertFalse(cellScanner2.advance());

View File

@ -50,7 +50,6 @@ import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.ResultScanner;
@ -406,7 +405,7 @@ public class TestVisibilityLabelsReplication {
for (Cell cell : edits) { for (Cell cell : edits) {
KeyValue kv = KeyValueUtil.ensureKeyValue(cell); KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
if (cf == null) { if (cf == null) {
cf = kv.getFamily(); cf = CellUtil.cloneFamily(kv);
} }
Tag tag = new Tag((byte) NON_VIS_TAG_TYPE, attribute); Tag tag = new Tag((byte) NON_VIS_TAG_TYPE, attribute);
List<Tag> tagList = new ArrayList<Tag>(); List<Tag> tagList = new ArrayList<Tag>();
@ -414,10 +413,6 @@ public class TestVisibilityLabelsReplication {
tagList.addAll(kv.getTags()); tagList.addAll(kv.getTags());
byte[] fromList = Tag.fromList(tagList); byte[] fromList = Tag.fromList(tagList);
TagRewriteCell newcell = new TagRewriteCell(kv, fromList); TagRewriteCell newcell = new TagRewriteCell(kv, fromList);
KeyValue newKV = new KeyValue(kv.getRow(), 0, kv.getRowLength(), kv.getFamily(), 0,
kv.getFamilyLength(), kv.getQualifier(), 0, kv.getQualifierLength(),
kv.getTimestamp(), KeyValue.Type.codeToType(kv.getType()), kv.getValue(), 0,
kv.getValueLength(), tagList);
((List<Cell>) updatedCells).add(newcell); ((List<Cell>) updatedCells).add(newcell);
} }
} }

View File

@ -151,20 +151,26 @@ public class TestVisibilityLablesWithGroups {
Cell current = cellScanner.current(); Cell current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), ROW_1, 0, ROW_1.length)); current.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current.getQualifier(), Q1)); assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
assertTrue(Bytes.equals(current.getValue(), value1)); current.getQualifierLength(), Q1, 0, Q1.length));
assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
current.getValueLength(), value1, 0, value1.length));
cellScanner.advance(); cellScanner.advance();
current = cellScanner.current(); current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), ROW_1, 0, ROW_1.length)); current.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current.getQualifier(), Q2)); assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
assertTrue(Bytes.equals(current.getValue(), value2)); current.getQualifierLength(), Q2, 0, Q2.length));
assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
current.getValueLength(), value2, 0, value2.length));
cellScanner.advance(); cellScanner.advance();
current = cellScanner.current(); current = cellScanner.current();
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), ROW_1, 0, ROW_1.length)); current.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current.getQualifier(), Q3)); assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
assertTrue(Bytes.equals(current.getValue(), value3)); current.getQualifierLength(), Q3, 0, Q3.length));
assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
current.getValueLength(), value3, 0, value3.length));
} }
return null; return null;
} }
@ -206,15 +212,19 @@ public class TestVisibilityLablesWithGroups {
// test user can see value2 (CONFIDENTIAL) and value3 (no label) // test user can see value2 (CONFIDENTIAL) and value3 (no label)
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), ROW_1, 0, ROW_1.length)); current.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current.getQualifier(), Q2)); assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
assertTrue(Bytes.equals(current.getValue(), value2)); current.getQualifierLength(), Q2, 0, Q2.length));
assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
current.getValueLength(), value2, 0, value2.length));
cellScanner.advance(); cellScanner.advance();
current = cellScanner.current(); current = cellScanner.current();
// test user can see value2 (CONFIDENTIAL) and value3 (no label) // test user can see value2 (CONFIDENTIAL) and value3 (no label)
assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(), assertTrue(Bytes.equals(current.getRowArray(), current.getRowOffset(),
current.getRowLength(), ROW_1, 0, ROW_1.length)); current.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current.getQualifier(), Q3)); assertTrue(Bytes.equals(current.getQualifierArray(), current.getQualifierOffset(),
assertTrue(Bytes.equals(current.getValue(), value3)); current.getQualifierLength(), Q3, 0, Q3.length));
assertTrue(Bytes.equals(current.getValueArray(), current.getValueOffset(),
current.getValueLength(), value3, 0, value3.length));
// Test scan with correct auth attribute for test user // Test scan with correct auth attribute for test user
Scan s1 = new Scan(); Scan s1 = new Scan();
@ -231,15 +241,19 @@ public class TestVisibilityLablesWithGroups {
// test user can see value2 (CONFIDENTIAL) and value3 (no label) // test user can see value2 (CONFIDENTIAL) and value3 (no label)
assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(), assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
current1.getRowLength(), ROW_1, 0, ROW_1.length)); current1.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current1.getQualifier(), Q2)); assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
assertTrue(Bytes.equals(current1.getValue(), value2)); current1.getQualifierLength(), Q2, 0, Q2.length));
assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
current1.getValueLength(), value2, 0, value2.length));
cellScanner1.advance(); cellScanner1.advance();
current1 = cellScanner1.current(); current1 = cellScanner1.current();
// test user can see value2 (CONFIDENTIAL) and value3 (no label) // test user can see value2 (CONFIDENTIAL) and value3 (no label)
assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(), assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
current1.getRowLength(), ROW_1, 0, ROW_1.length)); current1.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current1.getQualifier(), Q3)); assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
assertTrue(Bytes.equals(current1.getValue(), value3)); current1.getQualifierLength(), Q3, 0, Q3.length));
assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
current1.getValueLength(), value3, 0, value3.length));
// Test scan with incorrect auth attribute for test user // Test scan with incorrect auth attribute for test user
Scan s2 = new Scan(); Scan s2 = new Scan();
@ -254,8 +268,10 @@ public class TestVisibilityLablesWithGroups {
// This scan will only see value3 (no label) // This scan will only see value3 (no label)
assertTrue(Bytes.equals(current2.getRowArray(), current2.getRowOffset(), assertTrue(Bytes.equals(current2.getRowArray(), current2.getRowOffset(),
current2.getRowLength(), ROW_1, 0, ROW_1.length)); current2.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current2.getQualifier(), Q3)); assertTrue(Bytes.equals(current2.getQualifierArray(), current2.getQualifierOffset(),
assertTrue(Bytes.equals(current2.getValue(), value3)); current2.getQualifierLength(), Q3, 0, Q3.length));
assertTrue(Bytes.equals(current2.getValueArray(), current2.getValueOffset(),
current2.getValueLength(), value3, 0, value3.length));
assertFalse(cellScanner2.advance()); assertFalse(cellScanner2.advance());
} }
@ -315,8 +331,10 @@ public class TestVisibilityLablesWithGroups {
// test user can only see value3 (no label) // test user can only see value3 (no label)
assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(), assertTrue(Bytes.equals(current1.getRowArray(), current1.getRowOffset(),
current1.getRowLength(), ROW_1, 0, ROW_1.length)); current1.getRowLength(), ROW_1, 0, ROW_1.length));
assertTrue(Bytes.equals(current1.getQualifier(), Q3)); assertTrue(Bytes.equals(current1.getQualifierArray(), current1.getQualifierOffset(),
assertTrue(Bytes.equals(current1.getValue(), value3)); current1.getQualifierLength(), Q3, 0, Q3.length));
assertTrue(Bytes.equals(current1.getValueArray(), current1.getValueOffset(),
current1.getValueLength(), value3, 0, value3.length));
assertFalse(cellScanner1.advance()); assertFalse(cellScanner1.advance());
} }

View File

@ -29,24 +29,22 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.NavigableSet; import java.util.NavigableSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.IsolationLevel; import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver; import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@ -55,9 +53,9 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.KeyValueScanner; import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
import org.apache.hadoop.hbase.regionserver.ScanType; import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.ScanInfo;
import org.apache.hadoop.hbase.regionserver.StoreScanner; import org.apache.hadoop.hbase.regionserver.StoreScanner;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MediumTests;
@ -66,7 +64,6 @@ import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
import org.junit.runners.Parameterized; import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters; import org.junit.runners.Parameterized.Parameters;
@ -74,7 +71,6 @@ import org.junit.runners.Parameterized.Parameters;
@Category({MiscTests.class, MediumTests.class}) @Category({MiscTests.class, MediumTests.class})
@RunWith(Parameterized.class) @RunWith(Parameterized.class)
public class TestCoprocessorScanPolicy { public class TestCoprocessorScanPolicy {
private static final Log LOG = LogFactory.getLog(TestCoprocessorScanPolicy.class);
protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); protected final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static final byte[] F = Bytes.toBytes("fam"); private static final byte[] F = Bytes.toBytes("fam");
private static final byte[] Q = Bytes.toBytes("qual"); private static final byte[] Q = Bytes.toBytes("qual");
@ -229,12 +225,16 @@ public class TestCoprocessorScanPolicy {
if (put.getAttribute("ttl") != null) { if (put.getAttribute("ttl") != null) {
Cell cell = put.getFamilyCellMap().values().iterator().next().get(0); Cell cell = put.getFamilyCellMap().values().iterator().next().get(0);
KeyValue kv = KeyValueUtil.ensureKeyValue(cell); KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
ttls.put(TableName.valueOf(kv.getQualifier()), Bytes.toLong(kv.getValue())); ttls.put(TableName.valueOf(
Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength())),
Bytes.toLong(CellUtil.cloneValue(kv)));
c.bypass(); c.bypass();
} else if (put.getAttribute("versions") != null) { } else if (put.getAttribute("versions") != null) {
Cell cell = put.getFamilyCellMap().values().iterator().next().get(0); Cell cell = put.getFamilyCellMap().values().iterator().next().get(0);
KeyValue kv = KeyValueUtil.ensureKeyValue(cell); KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
versions.put(TableName.valueOf(kv.getQualifier()), Bytes.toInt(kv.getValue())); versions.put(TableName.valueOf(
Bytes.toString(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength())),
Bytes.toInt(CellUtil.cloneValue(kv)));
c.bypass(); c.bypass();
} }
} }

View File

@ -39,19 +39,16 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting; import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.log4j.Level; import org.apache.log4j.Level;
// imports for things that haven't moved from regionserver.wal yet.
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@ -123,12 +120,12 @@ public class TestSecureWAL {
List<Cell> cells = entry.getEdit().getCells(); List<Cell> cells = entry.getEdit().getCells();
assertTrue("Should be one KV per WALEdit", cells.size() == 1); assertTrue("Should be one KV per WALEdit", cells.size() == 1);
for (Cell cell: cells) { for (Cell cell: cells) {
byte[] thisRow = cell.getRow(); assertTrue("Incorrect row", Bytes.equals(cell.getRowArray(), cell.getRowOffset(),
assertTrue("Incorrect row", Bytes.equals(thisRow, row)); cell.getRowLength(), row, 0, row.length));
byte[] thisFamily = cell.getFamily(); assertTrue("Incorrect family", Bytes.equals(cell.getFamilyArray(), cell.getFamilyOffset(),
assertTrue("Incorrect family", Bytes.equals(thisFamily, family)); cell.getFamilyLength(), family, 0, family.length));
byte[] thisValue = cell.getValue(); assertTrue("Incorrect value", Bytes.equals(cell.getValueArray(), cell.getValueOffset(),
assertTrue("Incorrect value", Bytes.equals(thisValue, value)); cell.getValueLength(), value, 0, value.length));
} }
} }
assertEquals("Should have read back as many KVs as written", total, count); assertEquals("Should have read back as many KVs as written", total, count);

View File

@ -41,6 +41,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
@ -51,6 +52,12 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.SampleRegionWALObserver; import org.apache.hadoop.hbase.coprocessor.SampleRegionWALObserver;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogReader;
import org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogWriter;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
@ -68,14 +75,6 @@ import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import org.junit.rules.TestName; import org.junit.rules.TestName;
// imports for things that haven't moved from regionserver.wal yet.
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogReader;
import org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogWriter;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
/** /**
* WAL tests that can be reused across providers. * WAL tests that can be reused across providers.
*/ */
@ -521,8 +520,9 @@ public class TestWALFactory {
assertTrue(Bytes.equals(info.getEncodedNameAsBytes(), key.getEncodedRegionName())); assertTrue(Bytes.equals(info.getEncodedNameAsBytes(), key.getEncodedRegionName()));
assertTrue(htd.getTableName().equals(key.getTablename())); assertTrue(htd.getTableName().equals(key.getTablename()));
Cell cell = val.getCells().get(0); Cell cell = val.getCells().get(0);
assertTrue(Bytes.equals(row, cell.getRow())); assertTrue(Bytes.equals(row, 0, row.length, cell.getRowArray(), cell.getRowOffset(),
assertEquals((byte)(i + '0'), cell.getValue()[0]); cell.getRowLength()));
assertEquals((byte)(i + '0'), CellUtil.cloneValue(cell)[0]);
System.out.println(key + " " + val); System.out.println(key + " " + val);
} }
} finally { } finally {
@ -574,8 +574,9 @@ public class TestWALFactory {
assertTrue(Bytes.equals(hri.getEncodedNameAsBytes(), assertTrue(Bytes.equals(hri.getEncodedNameAsBytes(),
entry.getKey().getEncodedRegionName())); entry.getKey().getEncodedRegionName()));
assertTrue(htd.getTableName().equals(entry.getKey().getTablename())); assertTrue(htd.getTableName().equals(entry.getKey().getTablename()));
assertTrue(Bytes.equals(row, val.getRow())); assertTrue(Bytes.equals(row, 0, row.length, val.getRowArray(), val.getRowOffset(),
assertEquals((byte)(idx + '0'), val.getValue()[0]); val.getRowLength()));
assertEquals((byte) (idx + '0'), CellUtil.cloneValue(val)[0]);
System.out.println(entry.getKey() + " " + val); System.out.println(entry.getKey() + " " + val);
idx++; idx++;
} }
@ -687,9 +688,10 @@ public class TestWALFactory {
assertEquals(tableName, entry.getKey().getTablename()); assertEquals(tableName, entry.getKey().getTablename());
int idx = 0; int idx = 0;
for (Cell val : entry.getEdit().getCells()) { for (Cell val : entry.getEdit().getCells()) {
assertTrue(Bytes.equals(row, val.getRow())); assertTrue(Bytes.equals(row, 0, row.length, val.getRowArray(), val.getRowOffset(),
val.getRowLength()));
String value = i + "" + idx; String value = i + "" + idx;
assertArrayEquals(Bytes.toBytes(value), val.getValue()); assertArrayEquals(Bytes.toBytes(value), CellUtil.cloneValue(val));
idx++; idx++;
} }
} }

View File

@ -41,13 +41,6 @@ import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.log4j.Level;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
@ -62,18 +55,24 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode; import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.wal.WAL.Entry; import org.apache.hadoop.hbase.regionserver.wal.FaultySequenceFileLogReader;
import org.apache.hadoop.hbase.wal.WAL.Reader; import org.apache.hadoop.hbase.regionserver.wal.InstrumentedLogWriter;
import org.apache.hadoop.hbase.wal.WALProvider.Writer; import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
import org.apache.hadoop.hbase.wal.WALSplitter.CorruptedLogFileException; import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CancelableProgressable; import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WAL.Reader;
import org.apache.hadoop.hbase.wal.WALProvider.Writer;
import org.apache.hadoop.hbase.wal.WALSplitter.CorruptedLogFileException;
import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException; import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RemoteException;
@ -82,9 +81,9 @@ import org.junit.AfterClass;
import org.junit.Before; import org.junit.Before;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Rule; import org.junit.Rule;
import org.junit.rules.TestName;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito; import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock; import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer; import org.mockito.stubbing.Answer;
@ -92,12 +91,6 @@ import org.mockito.stubbing.Answer;
import com.google.common.base.Joiner; import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
// imports for things that haven't moved from regionserver.wal yet.
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.regionserver.wal.InstrumentedLogWriter;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
import org.apache.hadoop.hbase.regionserver.wal.FaultySequenceFileLogReader;
/** /**
* Testing {@link WAL} splitting code. * Testing {@link WAL} splitting code.
*/ */
@ -187,7 +180,7 @@ public class TestWALSplit {
REGIONS.clear(); REGIONS.clear();
Collections.addAll(REGIONS, "bbb", "ccc"); Collections.addAll(REGIONS, "bbb", "ccc");
InstrumentedLogWriter.activateFailure = false; InstrumentedLogWriter.activateFailure = false;
this.mode = (conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false) ? this.mode = (conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false) ?
RecoveryMode.LOG_REPLAY : RecoveryMode.LOG_SPLITTING); RecoveryMode.LOG_REPLAY : RecoveryMode.LOG_SPLITTING);
wals = new WALFactory(conf, null, name.getMethodName()); wals = new WALFactory(conf, null, name.getMethodName());
WALDIR = new Path(HBASEDIR, DefaultWALProvider.getWALDirectoryName(name.getMethodName())); WALDIR = new Path(HBASEDIR, DefaultWALProvider.getWALDirectoryName(name.getMethodName()));
@ -957,7 +950,8 @@ public class TestWALSplit {
Cell cell = cells.get(0); Cell cell = cells.get(0);
// Check that the edits come in the right order. // Check that the edits come in the right order.
assertEquals(expectedIndex, Bytes.toInt(cell.getRow())); assertEquals(expectedIndex, Bytes.toInt(cell.getRowArray(), cell.getRowOffset(),
cell.getRowLength()));
expectedIndex++; expectedIndex++;
return null; return null;
} }

View File

@ -224,7 +224,8 @@ EOF
# Fetch cell value # Fetch cell value
cell = result.listCells[0] cell = result.listCells[0]
org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValue) org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValueArray,
cell.getValueOffset, cell.getValueLength)
end end
#---------------------------------------------------------------------------------------------- #----------------------------------------------------------------------------------------------
@ -371,8 +372,10 @@ EOF
# Print out results. Result can be Cell or RowResult. # Print out results. Result can be Cell or RowResult.
res = {} res = {}
result.listCells.each do |c| result.listCells.each do |c|
family = String.from_java_bytes(c.getFamily) family = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getFamilyArray,
qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifier) c.getFamilyOffset, c.getFamilyLength)
qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifierArray,
c.getQualifierOffset, c.getQualifierLength)
column = "#{family}:#{qualifier}" column = "#{family}:#{qualifier}"
value = to_string(column, c, maxlength) value = to_string(column, c, maxlength)
@ -403,7 +406,8 @@ EOF
# Fetch cell value # Fetch cell value
cell = result.listCells[0] cell = result.listCells[0]
org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValue) org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValueArray,
cell.getValueOffset, cell.getValueLength)
end end
def _hash_to_scan(args) def _hash_to_scan(args)
@ -505,8 +509,10 @@ EOF
key = org.apache.hadoop.hbase.util.Bytes::toStringBinary(row.getRow) key = org.apache.hadoop.hbase.util.Bytes::toStringBinary(row.getRow)
row.listCells.each do |c| row.listCells.each do |c|
family = String.from_java_bytes(c.getFamily) family = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getFamilyArray,
qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifier) c.getFamilyOffset, c.getFamilyLength)
qualifier = org.apache.hadoop.hbase.util.Bytes::toStringBinary(c.getQualifierArray,
c.getQualifierOffset, c.getQualifierLength)
column = "#{family}:#{qualifier}" column = "#{family}:#{qualifier}"
cell = to_string(column, c, maxlength) cell = to_string(column, c, maxlength)
@ -640,14 +646,17 @@ EOF
def to_string(column, kv, maxlength = -1) def to_string(column, kv, maxlength = -1)
if is_meta_table? if is_meta_table?
if column == 'info:regioninfo' or column == 'info:splitA' or column == 'info:splitB' if column == 'info:regioninfo' or column == 'info:splitA' or column == 'info:splitB'
hri = org.apache.hadoop.hbase.HRegionInfo.parseFromOrNull(kv.getValue) hri = org.apache.hadoop.hbase.HRegionInfo.parseFromOrNull(kv.getValueArray,
kv.getValueOffset, kv.getValueLength)
return "timestamp=%d, value=%s" % [kv.getTimestamp, hri.toString] return "timestamp=%d, value=%s" % [kv.getTimestamp, hri.toString]
end end
if column == 'info:serverstartcode' if column == 'info:serverstartcode'
if kv.getValue.length > 0 if kv.getValue.length > 0
str_val = org.apache.hadoop.hbase.util.Bytes.toLong(kv.getValue) str_val = org.apache.hadoop.hbase.util.Bytes.toLong(kv.getValueArray,
kv.getValueOffset, kv.getValueLength)
else else
str_val = org.apache.hadoop.hbase.util.Bytes.toStringBinary(kv.getValue) str_val = org.apache.hadoop.hbase.util.Bytes.toStringBinary(kv.getValueArray,
kv.getValueOffset, kv.getValueLength)
end end
return "timestamp=%d, value=%s" % [kv.getTimestamp, str_val] return "timestamp=%d, value=%s" % [kv.getTimestamp, str_val]
end end
@ -679,7 +688,7 @@ EOF
end end
end end
method = eval(klazz_name).method(converter) method = eval(klazz_name).method(converter)
return method.call(kv.getValue) # apply the converter return method.call(org.apache.hadoop.hbase.CellUtil.cloneValue(kv)) # apply the converter
end end
# if the column spec contains CONVERTER information, to get rid of :CONVERTER info from column pair. # if the column spec contains CONVERTER information, to get rid of :CONVERTER info from column pair.