HBASE-19092 Make Tag IA.LimitedPrivate and expose for CPs (Ram)

Signed-off-by: Chia-ping Tsai, Anoop Sam John, Stack
Vasudevan 2017-11-24 12:38:42 +05:30
parent 2838cf3e05
commit 6ac6ae3fa2
50 changed files with 626 additions and 575 deletions

View File

@@ -34,11 +34,9 @@ import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagUtil;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -48,14 +46,14 @@ import org.apache.hadoop.hbase.security.access.AccessControlUtil;
 import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.security.visibility.CellVisibility;
 import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
 import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
 import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
 import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassSize;
+import org.apache.yetus.audience.InterfaceAudience;

 @InterfaceAudience.Public
 public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable,
@@ -230,7 +228,8 @@ public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable,
     if (tags != null) {
       List<String> tagsString = new ArrayList<>(tags.size());
       for (Tag t : tags) {
-        tagsString.add((t.getType()) + ":" + Bytes.toStringBinary(TagUtil.cloneValue(t)));
+        tagsString
+            .add((t.getType()) + ":" + Bytes.toStringBinary(Tag.cloneValue(t)));
       }
       stringMap.put("tag", tagsString);
     }

View File

@@ -30,7 +30,6 @@ import java.util.NavigableSet;
 import java.util.function.Function;

 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CacheEvictionStats;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellScanner;
@@ -46,7 +45,6 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagUtil;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Consistency;
 import org.apache.hadoop.hbase.client.Delete;
@@ -92,6 +90,7 @@ import org.apache.hadoop.hbase.util.DynamicClassLoader;
 import org.apache.hadoop.hbase.util.ExceptionUtil;
 import org.apache.hadoop.hbase.util.Methods;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.yetus.audience.InterfaceAudience;

 import com.google.protobuf.ByteString;
 import com.google.protobuf.CodedInputStream;
@@ -539,7 +538,8 @@ public final class ProtobufUtil {
             .setTags(allTagsBytes)
             .build());
       } else {
-        List<Tag> tags = TagUtil.asList(allTagsBytes, 0, (short)allTagsBytes.length);
+        List<Tag> tags =
+            TagUtil.asList(allTagsBytes, 0, (short) allTagsBytes.length);
         Tag[] tagsArray = new Tag[tags.size()];
         put.addImmutable(family, qualifier, ts, value, tags.toArray(tagsArray));
       }

View File

@@ -673,7 +673,8 @@ public final class ProtobufUtil {
             .setTags(allTagsBytes)
             .build());
       } else {
-        List<Tag> tags = TagUtil.asList(allTagsBytes, 0, (short)allTagsBytes.length);
+        List<Tag> tags =
+            TagUtil.asList(allTagsBytes, 0, (short) allTagsBytes.length);
         Tag[] tagsArray = new Tag[tags.size()];
         put.addImmutable(family, qualifier, ts, value, tags.toArray(tagsArray));
       }

View File

@@ -184,9 +184,10 @@ public interface Cell {
   /**
    * HBase internally uses 2 bytes to store tags length in Cell.
    * As the tags length is always a non-negative number, to make good use of the sign bit,
-   * the max of tags length is defined as {@link TagUtil#MAX_TAGS_LENGTH}, which is 2 * Short.MAX_VALUE + 1 = 65535.
+   * the max of tags length is defined as 2 * Short.MAX_VALUE + 1 = 65535.
    * As a result, the return type is int, because a short is not capable of handling that.
-   * Please note that even if the return type is int, the max tags length is far less than Integer.MAX_VALUE.
+   * Please note that even if the return type is int, the max tags length is far
+   * less than Integer.MAX_VALUE.
    *
    * @return the total length of the tags in the Cell.
    */
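
The javadoc above is the rationale for the int return type: the 2 bytes holding the tags length are read as an unsigned value, so the maximum is 0xFFFF = 65535, which overflows a signed short. A minimal sketch (hypothetical demo class, not part of this commit):

// Illustrative only: why Cell#getTagsLength() returns int, not short.
public class TagsLengthDemo {
  public static void main(String[] args) {
    int maxTagsLength = (2 * Short.MAX_VALUE) + 1; // 2 * 32767 + 1 = 65535
    System.out.println(maxTagsLength);             // 65535 == 0xFFFF
    // Reading the same 2 bytes as a signed short would be negative:
    short asShort = (short) maxTagsLength;
    System.out.println(asShort);                   // -1
  }
}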

View File

@@ -18,10 +18,10 @@
 package org.apache.hadoop.hbase;

-import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 import static org.apache.hadoop.hbase.KeyValue.COLUMN_FAMILY_DELIMITER;
-import static org.apache.hadoop.hbase.KeyValue.getDelimiter;
 import static org.apache.hadoop.hbase.KeyValue.COLUMN_FAMILY_DELIM_ARRAY;
+import static org.apache.hadoop.hbase.KeyValue.getDelimiter;
+import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;

 import java.io.DataOutput;
 import java.io.DataOutputStream;
@@ -33,17 +33,16 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map.Entry;
 import java.util.NavigableMap;
+import java.util.Optional;

 import org.apache.hadoop.hbase.KeyValue.Type;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceAudience.Private;
-
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.io.HeapSize;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience.Private;

 /**
  * Utility methods helpful for slinging {@link Cell} instances. Some methods below are for internal
@@ -130,6 +129,7 @@ public final class CellUtil {
   /**
    * @deprecated As of HBase-2.0. Will be removed in HBase-3.0.
+   *             Use {@link RawCell#cloneTags()}
    */
   @Deprecated
   public static byte[] cloneTags(Cell cell) {
@@ -588,17 +588,21 @@ public final class CellUtil {
   }

   /**
+   * Note : Now only CPs can create cell with tags using the CP environment
    * @return A new cell which is having the extra tags also added to it.
    * @deprecated As of HBase-2.0. Will be removed in HBase-3.0.
+   *             Use CP environment to build Cell using {@link ExtendedCellBuilder}
    */
   @Deprecated
   public static Cell createCell(Cell cell, List<Tag> tags) {
-    return createCell(cell, TagUtil.fromList(tags));
+    return createCell(cell, Tag.fromList(tags));
   }

   /**
+   * Now only CPs can create cell with tags using the CP environment
    * @return A new cell which is having the extra tags also added to it.
    * @deprecated As of HBase-2.0. Will be removed in HBase-3.0.
+   *             Use CP environment to build Cell using {@link ExtendedCellBuilder}
    */
   @Deprecated
   public static Cell createCell(Cell cell, byte[] tags) {
@@ -609,7 +613,9 @@ public final class CellUtil {
   }

   /**
+   * Now only CPs can create cell with tags using the CP environment
    * @deprecated As of HBase-2.0. Will be removed in HBase-3.0.
+   *             Use CP environment to build Cell using {@link ExtendedCellBuilder}
    */
   @Deprecated
   public static Cell createCell(Cell cell, byte[] value, byte[] tags) {
@@ -1063,6 +1069,7 @@ public final class CellUtil {
    * @param cell
    * @return estimate of the heap space
    * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *             Use {@link RawCell#getTags()}
    */
   @Deprecated
   public static long estimatedHeapSizeOf(final Cell cell) {
@@ -1133,31 +1140,17 @@ public final class CellUtil {
    * @param type Type of the Tag to retrieve
    * @return null if there is no tag of the passed in tag type
    * @deprecated As of 2.0.0 and will be removed in HBase-3.0.0
+   *             Use {@link RawCell#getTag(byte)}
    */
   @Deprecated
   public static Tag getTag(Cell cell, byte type) {
-    boolean bufferBacked = cell instanceof ByteBufferCell;
-    int length = cell.getTagsLength();
-    int offset = bufferBacked ? ((ByteBufferCell) cell).getTagsPosition() : cell.getTagsOffset();
-    int pos = offset;
-    while (pos < offset + length) {
-      int tagLen;
-      if (bufferBacked) {
-        ByteBuffer tagsBuffer = ((ByteBufferCell) cell).getTagsByteBuffer();
-        tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE);
-        if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) {
-          return new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE);
-        }
+    Optional<Tag> tag = PrivateCellUtil.getTag(cell, type);
+    if (tag.isPresent()) {
+      return tag.get();
     } else {
-        tagLen = Bytes.readAsInt(cell.getTagsArray(), pos, TAG_LENGTH_SIZE);
-        if (cell.getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
-          return new ArrayBackedTag(cell.getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE);
-        }
-      }
-      pos += TAG_LENGTH_SIZE + tagLen;
-    }
       return null;
   }
+  }

   /**
    * Returns true if the first range start1...end1 overlaps with the second range start2...end2,
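
The deprecated CellUtil#getTag above now simply unwraps the Optional returned by PrivateCellUtil#getTag. A migration sketch (hypothetical helper class, not part of this commit) contrasting the two styles:

import java.util.Optional;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag;

public class GetTagMigration {
  // Old style: deprecated, null-returning CellUtil.getTag.
  static byte[] valueOldStyle(Cell cell, byte type) {
    Tag t = CellUtil.getTag(cell, type); // deprecated as of 2.0.0
    return t == null ? null : Tag.cloneValue(t);
  }

  // New style: Optional-returning PrivateCellUtil.getTag (or RawCell#getTag from a CP).
  static byte[] valueNewStyle(Cell cell, byte type) {
    Optional<Tag> t = PrivateCellUtil.getTag(cell, type);
    return t.map(Tag::cloneValue).orElse(null);
  }
}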

View File

@@ -21,8 +21,8 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;

-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Extension to {@link Cell} with server side required functions. Server side Cell implementations
@@ -30,8 +30,8 @@ import org.apache.hadoop.hbase.io.HeapSize;
  * @see SettableSequenceId
  * @see SettableTimestamp
  */
-@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
-public interface ExtendedCell extends Cell, SettableSequenceId, SettableTimestamp, HeapSize,
+@InterfaceAudience.Private
+public interface ExtendedCell extends RawCell, SettableSequenceId, SettableTimestamp, HeapSize,
     Cloneable {

   public static int CELL_NOT_BASED_ON_CHUNK = -1;

View File

@@ -26,7 +26,7 @@ import org.apache.yetus.audience.InterfaceAudience;
  * Use {@link ExtendedCellBuilderFactory} to get ExtendedCellBuilder instance.
  * TODO: ditto for ByteBufferCell?
  */
-@InterfaceAudience.Private
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 public interface ExtendedCellBuilder extends CellBuilder {
   @Override
   ExtendedCellBuilder setRow(final byte[] row);
@@ -62,8 +62,15 @@ public interface ExtendedCellBuilder extends CellBuilder {
   @Override
   ExtendedCellBuilder clear();

+  // TODO : While creating RawCellBuilder allow 'Tag' to be passed instead of byte[]
   ExtendedCellBuilder setTags(final byte[] tags);
+  // TODO : While creating RawCellBuilder allow 'Tag' to be passed instead of byte[]
   ExtendedCellBuilder setTags(final byte[] tags, int tagsOffset, int tagsLength);

+  /**
+   * Internal usage. Be careful before you use this while building a cell
+   * @param seqId set the seqId
+   * @return the current ExtendedCellBuilder
+   */
   ExtendedCellBuilder setSequenceId(final long seqId);
 }
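
With ExtendedCellBuilder now LimitedPrivate(COPROC), a coprocessor can build a tagged cell roughly as below. This is a sketch only: the setters other than setTags come from the base CellBuilder interface, and the setType(byte) form assumed here may differ by version; ArrayBackedTag is used purely for illustration.

import java.util.Arrays;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.util.Bytes;

public class CpBuildCell {
  static Cell buildTaggedCell() {
    // (byte) 70 is above CUSTOM_TAG_TYPE_RANGE (64), per the new Tag javadoc.
    byte[] tags = Tag.fromList(Arrays.asList(
        new ArrayBackedTag((byte) 70, Bytes.toBytes("my-metadata"))));
    return ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
        .setRow(Bytes.toBytes("row1"))
        .setFamily(Bytes.toBytes("f"))
        .setQualifier(Bytes.toBytes("q"))
        .setTimestamp(System.currentTimeMillis())
        .setType(KeyValue.Type.Put.getCode())
        .setValue(Bytes.toBytes("v"))
        .setTags(tags) // tags still go in as serialized bytes; see the TODOs above
        .build();
  }
}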

View File

@@ -25,11 +25,24 @@ import org.apache.yetus.audience.InterfaceAudience;
 public final class ExtendedCellBuilderFactory {

   public static ExtendedCellBuilder create(CellBuilderType type) {
+    return create(type, true);
+  }
+
+  /**
+   * Allows creating a cell with the given CellBuilderType.
+   * @param type the type of CellBuilder (DEEP_COPY or SHALLOW_COPY).
+   * @param allowSeqIdUpdate if seqId can be updated. CPs are not allowed to update
+   *          the seqId
+   * @return the cell builder that is created
+   */
+  public static ExtendedCellBuilder create(CellBuilderType type, boolean allowSeqIdUpdate) {
     switch (type) {
       case SHALLOW_COPY:
+        // CPs are not allowed to update seqId and they always use DEEP_COPY. So we are not
+        // passing 'allowSeqIdUpdate' to IndividualBytesFieldCellBuilder
         return new IndividualBytesFieldCellBuilder();
       case DEEP_COPY:
-        return new KeyValueBuilder();
+        return new KeyValueBuilder(allowSeqIdUpdate);
       default:
         throw new UnsupportedOperationException("The type:" + type + " is unsupported");
     }
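
A sketch of the seqId gate this factory change introduces. The two-argument create and the UnsupportedOperationException come from this commit; the surrounding demo class is hypothetical. DEEP_COPY is used for both builders because only KeyValueBuilder receives the flag.

import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.ExtendedCellBuilder;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;

public class SeqIdGateDemo {
  public static void main(String[] args) {
    // Internal callers may allow seqId updates.
    ExtendedCellBuilder internal =
        ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY, true);
    internal.setSequenceId(42L); // allowed

    // CP-facing builders are created with allowSeqIdUpdate = false.
    ExtendedCellBuilder cpFacing =
        ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY, false);
    try {
      cpFacing.setSequenceId(42L);
    } catch (UnsupportedOperationException expected) {
      System.out.println(expected.getMessage()); // "SeqId cannot be set on this cell"
    }
  }
}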

View File

@@ -40,6 +40,12 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
   protected byte[] tags = null;
   protected int tagsOffset = 0;
   protected int tagsLength = 0;
+  // Will go away once we are done with RawCellBuilder
+  protected boolean allowSeqIdUpdate = false;
+
+  public ExtendedCellBuilderImpl(boolean allowSeqIdUpdate) {
+    this.allowSeqIdUpdate = allowSeqIdUpdate;
+  }

   @Override
   public ExtendedCellBuilder setRow(final byte[] row) {
@@ -126,9 +132,12 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
   @Override
   public ExtendedCellBuilder setSequenceId(final long seqId) {
+    if (allowSeqIdUpdate) {
       this.seqId = seqId;
       return this;
     }
+    throw new UnsupportedOperationException("SeqId cannot be set on this cell");
+  }

   private void checkBeforeBuild() {
     if (type == null) {

View File

@@ -94,7 +94,7 @@ public class IndividualBytesFieldCell implements ExtendedCell {
     }

     // Check tags
-    TagUtil.checkForTagsLength(tagsLength);
+    RawCell.checkForTagsLength(tagsLength);
     checkArrayBounds(row, rOffset, rLength);
     checkArrayBounds(family, fOffset, fLength);
     checkArrayBounds(qualifier, qOffset, qLength);

View File

@@ -21,6 +21,15 @@ import org.apache.yetus.audience.InterfaceAudience;
 @InterfaceAudience.Private
 class IndividualBytesFieldCellBuilder extends ExtendedCellBuilderImpl {

+  public IndividualBytesFieldCellBuilder() {
+    this(true);
+  }
+
+  public IndividualBytesFieldCellBuilder(boolean allowSeqIdUpdate) {
+    super(allowSeqIdUpdate);
+  }
+
   @Override
   public ExtendedCell innerBuild() {
     return new IndividualBytesFieldCell(row, rOffset, rLength,

View File

@@ -34,13 +34,12 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.io.RawComparator;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * An HBase Key/Value. This is the fundamental HBase Type.
  * <p>
@@ -766,7 +765,7 @@ public class KeyValue implements ExtendedCell {
     if (qlength > Integer.MAX_VALUE - rlength - flength) {
       throw new IllegalArgumentException("Qualifier > " + Integer.MAX_VALUE);
     }
-    TagUtil.checkForTagsLength(tagsLength);
+    RawCell.checkForTagsLength(tagsLength);
     // Key length
     long longkeylength = getKeyDataStructureSize(rlength, flength, qlength);
     if (longkeylength > Integer.MAX_VALUE) {
@@ -884,7 +883,7 @@ public class KeyValue implements ExtendedCell {
         tagsLength += t.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
       }
     }
-    TagUtil.checkForTagsLength(tagsLength);
+    RawCell.checkForTagsLength(tagsLength);
     int keyLength = (int) getKeyDataStructureSize(rlength, flength, qlength);
     int keyValueLength = (int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength,
       tagsLength);
@@ -918,7 +917,7 @@ public class KeyValue implements ExtendedCell {
           int tlen = t.getValueLength();
           pos = Bytes.putAsShort(buffer, pos, tlen + Tag.TYPE_LENGTH_SIZE);
           pos = Bytes.putByte(buffer, pos, t.getType());
-          TagUtil.copyValueTo(t, buffer, pos);
+          Tag.copyValueTo(t, buffer, pos);
           pos += tlen;
         }
       }
@@ -951,7 +950,7 @@ public class KeyValue implements ExtendedCell {
       int vlength, byte[] tags, int tagsOffset, int tagsLength) {
     checkParameters(row, rlength, family, flength, qlength, vlength);
-    TagUtil.checkForTagsLength(tagsLength);
+    RawCell.checkForTagsLength(tagsLength);
     // Allocate right-sized byte array.
     int keyLength = (int) getKeyDataStructureSize(rlength, flength, qlength);
     byte[] bytes = new byte[(int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength,
@@ -1001,7 +1000,7 @@ public class KeyValue implements ExtendedCell {
         tagsLength += t.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
       }
     }
-    TagUtil.checkForTagsLength(tagsLength);
+    RawCell.checkForTagsLength(tagsLength);
     // Allocate right-sized byte array.
     int keyLength = (int) getKeyDataStructureSize(rlength, flength, qlength);
     byte[] bytes = new byte[(int) getKeyValueDataStructureSize(rlength, flength, qlength, vlength,
@@ -1041,7 +1040,7 @@ public class KeyValue implements ExtendedCell {
         int tlen = t.getValueLength();
         pos = Bytes.putAsShort(bytes, pos, tlen + Tag.TYPE_LENGTH_SIZE);
         pos = Bytes.putByte(bytes, pos, t.getType());
-        TagUtil.copyValueTo(t, bytes, pos);
+        Tag.copyValueTo(t, bytes, pos);
         pos += tlen;
       }
     }

View File

@@ -21,6 +21,15 @@ import org.apache.yetus.audience.InterfaceAudience;
 @InterfaceAudience.Private
 class KeyValueBuilder extends ExtendedCellBuilderImpl {

+  KeyValueBuilder() {
+    this(true);
+  }
+
+  KeyValueBuilder(boolean allowSeqIdUpdate) {
+    super(allowSeqIdUpdate);
+  }
+
   @Override
   protected ExtendedCell innerBuild() {
     KeyValue kv = new KeyValue(row, rOffset, rLength,

View File

@@ -29,6 +29,7 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Optional;

 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
@@ -49,14 +50,12 @@ import com.google.common.annotations.VisibleForTesting;
  * rich set of APIs than those in {@link CellUtil} for internal usage.
  */
 @InterfaceAudience.Private
-// TODO : Make Tag IA.LimitedPrivate and move some of the Util methods to CP exposed Util class
-public class PrivateCellUtil {
+public final class PrivateCellUtil {

   /**
    * Private constructor to keep this class from being instantiated.
    */
   private PrivateCellUtil() {
   }

   /******************* ByteRange *******************************/
@@ -82,64 +81,6 @@ public class PrivateCellUtil {
     return range.set(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
   }

-  /**
-   * Returns tag value in a new byte array. If server-side, use {@link Tag#getValueArray()} with
-   * appropriate {@link Tag#getValueOffset()} and {@link Tag#getValueLength()} instead to save on
-   * allocations.
-   * @param cell
-   * @return tag value in a new byte array.
-   */
-  public static byte[] getTagsArray(Cell cell) {
-    byte[] output = new byte[cell.getTagsLength()];
-    copyTagsTo(cell, output, 0);
-    return output;
-  }
-
-  public static byte[] cloneTags(Cell cell) {
-    byte[] output = new byte[cell.getTagsLength()];
-    copyTagsTo(cell, output, 0);
-    return output;
-  }
-
-  /**
-   * Copies the tags info into the tag portion of the cell
-   * @param cell
-   * @param destination
-   * @param destinationOffset
-   * @return position after tags
-   */
-  public static int copyTagsTo(Cell cell, byte[] destination, int destinationOffset) {
-    int tlen = cell.getTagsLength();
-    if (cell instanceof ByteBufferCell) {
-      ByteBufferUtils.copyFromBufferToArray(destination,
-        ((ByteBufferCell) cell).getTagsByteBuffer(), ((ByteBufferCell) cell).getTagsPosition(),
-        destinationOffset, tlen);
-    } else {
-      System.arraycopy(cell.getTagsArray(), cell.getTagsOffset(), destination, destinationOffset,
-        tlen);
-    }
-    return destinationOffset + tlen;
-  }
-
-  /**
-   * Copies the tags info into the tag portion of the cell
-   * @param cell
-   * @param destination
-   * @param destinationOffset
-   * @return the position after tags
-   */
-  public static int copyTagsTo(Cell cell, ByteBuffer destination, int destinationOffset) {
-    int tlen = cell.getTagsLength();
-    if (cell instanceof ByteBufferCell) {
-      ByteBufferUtils.copyFromBufferToBuffer(((ByteBufferCell) cell).getTagsByteBuffer(),
-        destination, ((ByteBufferCell) cell).getTagsPosition(), destinationOffset, tlen);
-    } else {
-      ByteBufferUtils.copyFromArrayToBuffer(destination, destinationOffset, cell.getTagsArray(),
-        cell.getTagsOffset(), tlen);
-    }
-    return destinationOffset + tlen;
-  }
-
   /********************* misc *************************************/

   public static byte getRowByte(Cell cell, int index) {
@@ -168,7 +109,7 @@ public class PrivateCellUtil {
    * @return A new cell which is having the extra tags also added to it.
    */
   public static Cell createCell(Cell cell, List<Tag> tags) {
-    return createCell(cell, TagUtil.fromList(tags));
+    return createCell(cell, Tag.fromList(tags));
   }

   /**
@@ -653,7 +594,9 @@ public class PrivateCellUtil {
     ByteBufferUtils.putInt(out, valLen);// Value length
     int len = 2 * Bytes.SIZEOF_INT;
     len += writeFlatKey(cell, out);// Key
-    if (valLen > 0) out.write(value);// Value
+    if (valLen > 0) {
+      out.write(value);// Value
+    }
     len += valLen;
     if (withTags && tags != null) {
       // Write the tagsLength 2 bytes
@@ -787,7 +730,7 @@ public class PrivateCellUtil {
   /**
    * Finds if the qualifier part of the cell and the KV serialized byte[] are equal
-   * @param left
+   * @param left the cell with which we need to match the qualifier
    * @param buf the serialized keyvalue format byte[]
    * @param offset the offset of the qualifier in the byte[]
    * @param length the length of the qualifier in the byte[]
@@ -809,7 +752,9 @@ public class PrivateCellUtil {
   public static boolean matchingColumn(final Cell left, final byte[] fam, final int foffset,
       final int flength, final byte[] qual, final int qoffset, final int qlength) {
-    if (!matchingFamily(left, fam, foffset, flength)) return false;
+    if (!matchingFamily(left, fam, foffset, flength)) {
+      return false;
+    }
     return matchingQualifier(left, qual, qoffset, qlength);
   }

@@ -878,6 +823,113 @@ public class PrivateCellUtil {
     return t == Type.DeleteColumn.getCode() || t == Type.DeleteFamily.getCode();
   }

+  public static byte[] cloneTags(Cell cell) {
+    byte[] output = new byte[cell.getTagsLength()];
+    copyTagsTo(cell, output, 0);
+    return output;
+  }
+
+  /**
+   * Copies the tags info into the tag portion of the cell
+   * @param cell
+   * @param destination
+   * @param destinationOffset
+   * @return position after tags
+   */
+  public static int copyTagsTo(Cell cell, byte[] destination, int destinationOffset) {
+    int tlen = cell.getTagsLength();
+    if (cell instanceof ByteBufferCell) {
+      ByteBufferUtils.copyFromBufferToArray(destination,
+        ((ByteBufferCell) cell).getTagsByteBuffer(), ((ByteBufferCell) cell).getTagsPosition(),
+        destinationOffset, tlen);
+    } else {
+      System.arraycopy(cell.getTagsArray(), cell.getTagsOffset(), destination, destinationOffset,
+        tlen);
+    }
+    return destinationOffset + tlen;
+  }
+
+  /**
+   * Copies the tags info into the tag portion of the cell
+   * @param cell
+   * @param destination
+   * @param destinationOffset
+   * @return the position after tags
+   */
+  public static int copyTagsTo(Cell cell, ByteBuffer destination, int destinationOffset) {
+    int tlen = cell.getTagsLength();
+    if (cell instanceof ByteBufferCell) {
+      ByteBufferUtils.copyFromBufferToBuffer(((ByteBufferCell) cell).getTagsByteBuffer(),
+        destination, ((ByteBufferCell) cell).getTagsPosition(), destinationOffset, tlen);
+    } else {
+      ByteBufferUtils.copyFromArrayToBuffer(destination, destinationOffset, cell.getTagsArray(),
+        cell.getTagsOffset(), tlen);
+    }
+    return destinationOffset + tlen;
+  }
+
+  /**
+   * @param cell The Cell
+   * @return Tags in the given Cell as a List
+   */
+  public static List<Tag> getTags(Cell cell) {
+    List<Tag> tags = new ArrayList<>();
+    Iterator<Tag> tagsItr = tagsIterator(cell);
+    while (tagsItr.hasNext()) {
+      tags.add(tagsItr.next());
+    }
+    return tags;
+  }
+
+  /**
+   * Retrieve Cell's first tag, matching the passed in type
+   * @param cell The Cell
+   * @param type Type of the Tag to retrieve
+   * @return an empty Optional if there is no tag of the passed in tag type
+   */
+  public static Optional<Tag> getTag(Cell cell, byte type) {
+    boolean bufferBacked = cell instanceof ByteBufferCell;
+    int length = cell.getTagsLength();
+    int offset = bufferBacked ? ((ByteBufferCell) cell).getTagsPosition() : cell.getTagsOffset();
+    int pos = offset;
+    while (pos < offset + length) {
+      int tagLen;
+      if (bufferBacked) {
+        ByteBuffer tagsBuffer = ((ByteBufferCell) cell).getTagsByteBuffer();
+        tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE);
+        if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) {
+          return Optional.ofNullable(new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE));
+        }
+      } else {
+        tagLen = Bytes.readAsInt(cell.getTagsArray(), pos, TAG_LENGTH_SIZE);
+        if (cell.getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
+          return Optional
+              .ofNullable(new ArrayBackedTag(cell.getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE));
+        }
+      }
+      pos += TAG_LENGTH_SIZE + tagLen;
+    }
+    return Optional.ofNullable(null);
+  }
+
+  /**
+   * Util method to iterate through the tags in the given cell.
+   * @param cell The Cell over which tags iterator is needed.
+   * @return iterator for the tags
+   */
+  public static Iterator<Tag> tagsIterator(final Cell cell) {
+    final int tagsLength = cell.getTagsLength();
+    // Save an object allocation where we can
+    if (tagsLength == 0) {
+      return TagUtil.EMPTY_TAGS_ITR;
+    }
+    if (cell instanceof ByteBufferCell) {
+      return tagsIterator(((ByteBufferCell) cell).getTagsByteBuffer(),
+        ((ByteBufferCell) cell).getTagsPosition(), tagsLength);
+    }
+    return CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
+  }
+
   private static Iterator<Tag> tagsIterator(final ByteBuffer tags, final int offset,
       final int length) {
     return new Iterator<Tag>() {
@@ -907,95 +959,6 @@ public class PrivateCellUtil {
     };
   }

-  /**
-   * Util method to iterate through the tags in the given cell.
-   * @param cell The Cell over which tags iterator is needed.
-   * @return iterator for the tags
-   */
-  public static Iterator<Tag> tagsIterator(final Cell cell) {
-    final int tagsLength = cell.getTagsLength();
-    // Save an object allocation where we can
-    if (tagsLength == 0) {
-      return TagUtil.EMPTY_TAGS_ITR;
-    }
-    if (cell instanceof ByteBufferCell) {
-      return tagsIterator(((ByteBufferCell) cell).getTagsByteBuffer(),
-        ((ByteBufferCell) cell).getTagsPosition(), tagsLength);
-    }
-    return tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
-  }
-
-  private static Iterator<Tag> tagsIterator(final byte[] tags, final int offset, final int length) {
-    return new Iterator<Tag>() {
-      private int pos = offset;
-      private int endOffset = offset + length - 1;
-
-      @Override
-      public boolean hasNext() {
-        return this.pos < endOffset;
-      }
-
-      @Override
-      public Tag next() {
-        if (hasNext()) {
-          int curTagLen = Bytes.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE);
-          Tag tag = new ArrayBackedTag(tags, pos, curTagLen + TAG_LENGTH_SIZE);
-          this.pos += Bytes.SIZEOF_SHORT + curTagLen;
-          return tag;
-        }
-        return null;
-      }
-
-      @Override
-      public void remove() {
-        throw new UnsupportedOperationException();
-      }
-    };
-  }
-
-  /**
-   * @param cell The Cell
-   * @return Tags in the given Cell as a List
-   */
-  public static List<Tag> getTags(Cell cell) {
-    List<Tag> tags = new ArrayList<>();
-    Iterator<Tag> tagsItr = tagsIterator(cell);
-    while (tagsItr.hasNext()) {
-      tags.add(tagsItr.next());
-    }
-    return tags;
-  }
-
-  /**
-   * Retrieve Cell's first tag, matching the passed in type
-   * @param cell The Cell
-   * @param type Type of the Tag to retrieve
-   * @return null if there is no tag of the passed in tag type
-   */
-  public static Tag getTag(Cell cell, byte type) {
-    boolean bufferBacked = cell instanceof ByteBufferCell;
-    int length = cell.getTagsLength();
-    int offset = bufferBacked ? ((ByteBufferCell) cell).getTagsPosition() : cell.getTagsOffset();
-    int pos = offset;
-    while (pos < offset + length) {
-      int tagLen;
-      if (bufferBacked) {
-        ByteBuffer tagsBuffer = ((ByteBufferCell) cell).getTagsByteBuffer();
-        tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE);
-        if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) {
-          return new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE);
-        }
-      } else {
-        tagLen = Bytes.readAsInt(cell.getTagsArray(), pos, TAG_LENGTH_SIZE);
-        if (cell.getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
-          return new ArrayBackedTag(cell.getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE);
-        }
-      }
-      pos += TAG_LENGTH_SIZE + tagLen;
-    }
-    return null;
-  }
-
   /**
    * Returns true if the first range start1...end1 overlaps with the second range start2...end2,
    * assuming the byte arrays represent row keys
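
The tag helpers (cloneTags, copyTagsTo, getTags, getTag, tagsIterator) now live here for internal callers, while RawCell re-exposes the read-only subset to CPs. A small server-side sketch (hypothetical helper class) using the iterator form, which avoids materializing a List and short-circuits for tag-less cells:

import java.util.Iterator;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag;

public class TagWalk {
  // Counts the tags of a given type without copying the tag bytes.
  static int countTagsOfType(Cell cell, byte type) {
    int n = 0;
    Iterator<Tag> it = PrivateCellUtil.tagsIterator(cell);
    while (it.hasNext()) {
      if (it.next().getType() == type) {
        n++;
      }
    }
    return n;
  }
}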

View File

@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.util.List;
+import java.util.Optional;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * An extended version of Cell that gives more power to CPs
+ */
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
+public interface RawCell extends Cell {
+
+  static final int MAX_TAGS_LENGTH = (2 * Short.MAX_VALUE) + 1;
+
+  /**
+   * Allows cloning the tags in the cell to a new byte[]
+   * @return the byte[] having the tags
+   */
+  default byte[] cloneTags() {
+    return PrivateCellUtil.cloneTags(this);
+  }
+
+  /**
+   * Creates a list of tags in the current cell
+   * @return a list of tags
+   */
+  default List<Tag> getTags() {
+    return PrivateCellUtil.getTags(this);
+  }
+
+  /**
+   * Returns the specific tag of the given type
+   * @param type the type of the tag
+   * @return the specific tag if available, or an empty Optional
+   */
+  // TODO : Move to individual cell impl
+  default Optional<Tag> getTag(byte type) {
+    return PrivateCellUtil.getTag(this, type);
+  }
+
+  /**
+   * Check the length of tags. If it is invalid, throw IllegalArgumentException
+   * @param tagsLength the given length of tags
+   * @throws IllegalArgumentException if tagsLength is invalid
+   */
+  public static void checkForTagsLength(int tagsLength) {
+    if (tagsLength > MAX_TAGS_LENGTH) {
+      throw new IllegalArgumentException("tagslength " + tagsLength + " > " + MAX_TAGS_LENGTH);
+    }
+  }
+}
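
A sketch of how a coprocessor might consume this new interface; it assumes the cells handed to the CP implement RawCell (ExtendedCell now extends it), and the helper class is hypothetical:

import java.util.Optional;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.RawCell;
import org.apache.hadoop.hbase.Tag;

public class CpTagAccess {
  // Returns the first tag of the given type, if the cell exposes tags at all.
  static Optional<Tag> firstTagOf(Cell cell, byte tagType) {
    if (cell instanceof RawCell) {
      return ((RawCell) cell).getTag(tagType);
    }
    return Optional.empty();
  }
}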

View File

@@ -20,10 +20,12 @@
 package org.apache.hadoop.hbase;

 import java.nio.ByteBuffer;
+import java.util.List;

+import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
-
-import org.apache.hadoop.hbase.util.Bytes;

 /**
  * Tags are part of cells and helps to add metadata about them.
@@ -33,7 +35,7 @@ import org.apache.hadoop.hbase.util.Bytes;
  * <p>
  * See {@link TagType} for reserved tag types.
  */
-@InterfaceAudience.Private
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public interface Tag {

@@ -42,6 +44,11 @@ public interface Tag {
   public final static int INFRASTRUCTURE_SIZE = TYPE_LENGTH_SIZE + TAG_LENGTH_SIZE;
   public static final int MAX_TAG_LENGTH = (2 * Short.MAX_VALUE) + 1 - TAG_LENGTH_SIZE;

+  /**
+   * Custom tag types, if created, are suggested to be above this value, so that
+   * they do not overlap with internal tag types
+   */
+  public static final byte CUSTOM_TAG_TYPE_RANGE = (byte)64;
+
   /**
    * @return the tag type
    */
@@ -75,4 +82,129 @@ public interface Tag {
    * @return The {@link java.nio.ByteBuffer} containing the value bytes.
    */
   ByteBuffer getValueByteBuffer();
+
+  /**
+   * Returns tag value in a new byte array. Primarily for use client-side. If server-side, use
+   * {@link Tag#getValueArray()} with appropriate {@link Tag#getValueOffset()} and
+   * {@link Tag#getValueLength()} instead to save on allocations.
+   * @param tag The Tag whose value to be returned
+   * @return tag value in a new byte array.
+   */
+  public static byte[] cloneValue(Tag tag) {
+    int tagLength = tag.getValueLength();
+    byte[] tagArr = new byte[tagLength];
+    if (tag.hasArray()) {
+      Bytes.putBytes(tagArr, 0, tag.getValueArray(), tag.getValueOffset(), tagLength);
+    } else {
+      ByteBufferUtils.copyFromBufferToArray(tagArr, tag.getValueByteBuffer(), tag.getValueOffset(),
+        0, tagLength);
+    }
+    return tagArr;
+  }
+
+  /**
+   * Converts the value bytes of the given tag into a String value
+   * @param tag The Tag
+   * @return value as String
+   */
+  public static String getValueAsString(Tag tag) {
+    if (tag.hasArray()) {
+      return Bytes.toString(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
+    }
+    return Bytes.toString(cloneValue(tag));
+  }
+
+  /**
+   * Matches the value part of given tags
+   * @param t1 Tag to match the value
+   * @param t2 Tag to match the value
+   * @return True if values of both tags are same.
+   */
+  public static boolean matchingValue(Tag t1, Tag t2) {
+    if (t1.hasArray() && t2.hasArray()) {
+      return Bytes.equals(t1.getValueArray(), t1.getValueOffset(), t1.getValueLength(),
+        t2.getValueArray(), t2.getValueOffset(), t2.getValueLength());
+    }
+    if (t1.hasArray()) {
+      return ByteBufferUtils.equals(t2.getValueByteBuffer(), t2.getValueOffset(),
+        t2.getValueLength(), t1.getValueArray(), t1.getValueOffset(), t1.getValueLength());
+    }
+    if (t2.hasArray()) {
+      return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(),
+        t1.getValueLength(), t2.getValueArray(), t2.getValueOffset(), t2.getValueLength());
+    }
+    return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(), t1.getValueLength(),
+      t2.getValueByteBuffer(), t2.getValueOffset(), t2.getValueLength());
+  }
+
+  /**
+   * Copies the tag's value bytes to the given byte array
+   * @param tag The Tag
+   * @param out The byte array where to copy the Tag value.
+   * @param offset The offset within 'out' array where to copy the Tag value.
+   */
+  public static void copyValueTo(Tag tag, byte[] out, int offset) {
+    if (tag.hasArray()) {
+      Bytes.putBytes(out, offset, tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
+    } else {
+      ByteBufferUtils.copyFromBufferToArray(out, tag.getValueByteBuffer(), tag.getValueOffset(),
+        offset, tag.getValueLength());
+    }
+  }
+
+  /**
+   * Write a list of tags into a byte array
+   * @param tags The list of tags
+   * @return the serialized tag data as bytes
+   */
+  // TODO : Remove this when we move to RawCellBuilder
+  public static byte[] fromList(List<Tag> tags) {
+    if (tags == null || tags.isEmpty()) {
+      return HConstants.EMPTY_BYTE_ARRAY;
+    }
+    int length = 0;
+    for (Tag tag : tags) {
+      length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
+    }
+    byte[] b = new byte[length];
+    int pos = 0;
+    int tlen;
+    for (Tag tag : tags) {
+      tlen = tag.getValueLength();
+      pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE);
+      pos = Bytes.putByte(b, pos, tag.getType());
+      if (tag.hasArray()) {
+        pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen);
+      } else {
+        ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(),
+          pos, tlen);
+        pos += tlen;
+      }
+    }
+    return b;
+  }
+
+  /**
+   * Converts the value bytes of the given tag into a long value
+   * @param tag The Tag
+   * @return value as long
+   */
+  public static long getValueAsLong(Tag tag) {
+    if (tag.hasArray()) {
+      return Bytes.toLong(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
+    }
+    return ByteBufferUtils.toLong(tag.getValueByteBuffer(), tag.getValueOffset());
+  }
+
+  /**
+   * Converts the value bytes of the given tag into a byte value
+   * @param tag The Tag
+   * @return value as byte
+   */
+  public static byte getValueAsByte(Tag tag) {
+    if (tag.hasArray()) {
+      return tag.getValueArray()[tag.getValueOffset()];
+    }
+    return ByteBufferUtils.toByte(tag.getValueByteBuffer(), tag.getValueOffset());
+  }
 }
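
The static helpers moved here from TagUtil so that LimitedPrivate consumers get them from the Tag type itself. A small sketch (hypothetical demo class; ArrayBackedTag is still IA.Private and used only for illustration) of the value accessors; note matchingValue compares only values, not tag types:

import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.util.Bytes;

public class TagValueDemo {
  public static void main(String[] args) {
    // (byte) 70 is above CUSTOM_TAG_TYPE_RANGE (64).
    Tag t = new ArrayBackedTag((byte) 70, Bytes.toBytes("hello"));
    byte[] copy = Tag.cloneValue(t);    // value copied into a fresh byte[]
    String s = Tag.getValueAsString(t); // "hello"
    // Value bytes match; the differing tag types (70 vs 71) are ignored.
    boolean same = Tag.matchingValue(t, new ArrayBackedTag((byte) 71, copy));
    System.out.println(s + " " + same); // hello true
  }
}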

View File

@@ -17,55 +17,24 @@
  */
 package org.apache.hadoop.hbase;

-import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;

-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.util.StreamUtils;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.yetus.audience.InterfaceAudience;

 @InterfaceAudience.Private
 public final class TagUtil {

-  // If you would like to check the length of tags, please call {@link TagUtil#checkForTagsLength()}.
-  private static final int MAX_TAGS_LENGTH = (2 * Short.MAX_VALUE) + 1;
-
-  /**
-   * Private constructor to keep this class from being instantiated.
-   */
   private TagUtil(){}

-  /**
-   * Returns tag value in a new byte array.
-   * Primarily for use client-side. If server-side, use
-   * {@link Tag#getValueArray()} with appropriate {@link Tag#getValueOffset()}
-   * and {@link Tag#getValueLength()} instead to save on allocations.
-   *
-   * @param tag The Tag whose value to be returned
-   * @return tag value in a new byte array.
-   */
-  public static byte[] cloneValue(Tag tag) {
-    int tagLength = tag.getValueLength();
-    byte[] tagArr = new byte[tagLength];
-    if (tag.hasArray()) {
-      Bytes.putBytes(tagArr, 0, tag.getValueArray(), tag.getValueOffset(), tagLength);
-    } else {
-      ByteBufferUtils.copyFromBufferToArray(tagArr, tag.getValueByteBuffer(), tag.getValueOffset(),
-        0, tagLength);
-    }
-    return tagArr;
-  }
-
   /**
    * Creates list of tags from given byte array, expected that it is in the expected tag format.
-   *
    * @param b The byte array
    * @param offset The offset in array where tag bytes begin
    * @param length Total length of all tags bytes
@@ -75,138 +44,13 @@ public final class TagUtil {
     List<Tag> tags = new ArrayList<>();
     int pos = offset;
     while (pos < offset + length) {
-      int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE);
-      tags.add(new ArrayBackedTag(b, pos, tagLen + TAG_LENGTH_SIZE));
-      pos += TAG_LENGTH_SIZE + tagLen;
+      int tagLen = Bytes.readAsInt(b, pos, Tag.TAG_LENGTH_SIZE);
+      tags.add(new ArrayBackedTag(b, pos, tagLen + Tag.TAG_LENGTH_SIZE));
+      pos += Tag.TAG_LENGTH_SIZE + tagLen;
     }
     return tags;
   }

-  /**
-   * Creates list of tags from given ByteBuffer, expected that it is in the expected tag format.
-   *
-   * @param b The ByteBuffer
-   * @param offset The offset in ByteBuffer where tag bytes begin
-   * @param length Total length of all tags bytes
-   * @return List of tags
-   */
-  public static List<Tag> asList(ByteBuffer b, int offset, int length) {
-    List<Tag> tags = new ArrayList<>();
-    int pos = offset;
-    while (pos < offset + length) {
-      int tagLen = ByteBufferUtils.readAsInt(b, pos, TAG_LENGTH_SIZE);
-      tags.add(new ByteBufferTag(b, pos, tagLen + TAG_LENGTH_SIZE));
-      pos += TAG_LENGTH_SIZE + tagLen;
-    }
-    return tags;
-  }
-
-  /**
-   * Write a list of tags into a byte array
-   *
-   * @param tags The list of tags
-   * @return the serialized tag data as bytes
-   */
-  public static byte[] fromList(List<Tag> tags) {
-    if (tags == null || tags.isEmpty()) {
-      return HConstants.EMPTY_BYTE_ARRAY;
-    }
-    int length = 0;
-    for (Tag tag : tags) {
-      length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
-    }
-    byte[] b = new byte[length];
-    int pos = 0;
-    int tlen;
-    for (Tag tag : tags) {
-      tlen = tag.getValueLength();
-      pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE);
-      pos = Bytes.putByte(b, pos, tag.getType());
-      if (tag.hasArray()) {
-        pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen);
-      } else {
-        ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(),
-          pos, tlen);
-        pos += tlen;
-      }
-    }
-    return b;
-  }
-
-  /**
-   * Converts the value bytes of the given tag into a long value
-   * @param tag The Tag
-   * @return value as long
-   */
-  public static long getValueAsLong(Tag tag) {
-    if (tag.hasArray()) {
-      return Bytes.toLong(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
-    }
-    return ByteBufferUtils.toLong(tag.getValueByteBuffer(), tag.getValueOffset());
-  }
-
-  /**
-   * Converts the value bytes of the given tag into a byte value
-   * @param tag The Tag
-   * @return value as byte
-   */
-  public static byte getValueAsByte(Tag tag) {
-    if (tag.hasArray()) {
-      return tag.getValueArray()[tag.getValueOffset()];
-    }
-    return ByteBufferUtils.toByte(tag.getValueByteBuffer(), tag.getValueOffset());
-  }
-
-  /**
-   * Converts the value bytes of the given tag into a String value
-   * @param tag The Tag
-   * @return value as String
-   */
-  public static String getValueAsString(Tag tag){
-    if(tag.hasArray()){
-      return Bytes.toString(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
-    }
-    return Bytes.toString(cloneValue(tag));
-  }
-
-  /**
-   * Matches the value part of given tags
-   * @param t1 Tag to match the value
-   * @param t2 Tag to match the value
-   * @return True if values of both tags are same.
-   */
-  public static boolean matchingValue(Tag t1, Tag t2) {
-    if (t1.hasArray() && t2.hasArray()) {
-      return Bytes.equals(t1.getValueArray(), t1.getValueOffset(), t1.getValueLength(),
-        t2.getValueArray(), t2.getValueOffset(), t2.getValueLength());
-    }
-    if (t1.hasArray()) {
-      return ByteBufferUtils.equals(t2.getValueByteBuffer(), t2.getValueOffset(),
-        t2.getValueLength(), t1.getValueArray(), t1.getValueOffset(), t1.getValueLength());
-    }
-    if (t2.hasArray()) {
-      return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(),
-        t1.getValueLength(), t2.getValueArray(), t2.getValueOffset(), t2.getValueLength());
-    }
-    return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(), t1.getValueLength(),
-      t2.getValueByteBuffer(), t2.getValueOffset(), t2.getValueLength());
-  }
-
-  /**
-   * Copies the tag's value bytes to the given byte array
-   * @param tag The Tag
-   * @param out The byte array where to copy the Tag value.
-   * @param offset The offset within 'out' array where to copy the Tag value.
-   */
-  public static void copyValueTo(Tag tag, byte[] out, int offset) {
-    if (tag.hasArray()) {
-      Bytes.putBytes(out, offset, tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
-    } else {
-      ByteBufferUtils.copyFromBufferToArray(out, tag.getValueByteBuffer(), tag.getValueOffset(),
-        offset, tag.getValueLength());
-    }
-  }
-
   /**
    * Reads an int value stored as a VInt at tag's given offset.
    * @param tag The Tag
@@ -313,16 +157,4 @@ public final class TagUtil {
       throw new UnsupportedOperationException();
     }
   };
-
-  /**
-   * Check the length of tags. If it is invalid, throw IllegalArgumentException
-   *
-   * @param tagsLength
-   * @throws IllegalArgumentException if tagslength is invalid
-   */
-  public static void checkForTagsLength(int tagsLength) {
-    if (tagsLength > MAX_TAGS_LENGTH) {
-      throw new IllegalArgumentException("tagslength "+ tagsLength + " > " + MAX_TAGS_LENGTH);
-    }
-  }
 }
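
Serialization is now split across the two classes: Tag#fromList writes the wire format, while the byte[]-based TagUtil#asList parser remains here. A round-trip sketch (hypothetical demo class; ArrayBackedTag used for illustration only):

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class TagRoundTrip {
  public static void main(String[] args) {
    List<Tag> in = Arrays.asList(
        new ArrayBackedTag((byte) 70, Bytes.toBytes("a")),
        new ArrayBackedTag((byte) 71, Bytes.toBytes("bb")));
    byte[] wire = Tag.fromList(in);                       // serialize (moved from TagUtil)
    List<Tag> out = TagUtil.asList(wire, 0, wire.length); // parse (still in TagUtil)
    System.out.println(out.size() + " " + Tag.getValueAsString(out.get(1))); // 2 bb
  }
}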

View File

@@ -17,7 +17,7 @@
 package org.apache.hadoop.hbase;

 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertFalse;

 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -56,7 +56,7 @@ public class TestByteBufferKeyValue {
     KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0l, Type.Put, row1);
     ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length);
     ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length);
-    ByteBufferCell offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0l);
+    ByteBufferCell offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0L);
     assertEquals(
       ROW1,
       ByteBufferUtils.toStringBinary(offheapKV.getRowByteBuffer(),
@@ -138,7 +138,7 @@ public class TestByteBufferKeyValue {
     KeyValue kvCell = new KeyValue(row1, fam1, qual1, 0l, Type.Put, row1, tags);
     ByteBuffer buf = ByteBuffer.allocateDirect(kvCell.getBuffer().length);
     ByteBufferUtils.copyFromArrayToBuffer(buf, kvCell.getBuffer(), 0, kvCell.getBuffer().length);
-    ByteBufferCell offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0l);
+    ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(buf, 0, buf.capacity(), 0l);
     assertEquals(
       ROW1,
       ByteBufferUtils.toStringBinary(offheapKV.getRowByteBuffer(),
@@ -158,18 +158,19 @@ public class TestByteBufferKeyValue {
     assertEquals(0L, offheapKV.getTimestamp());
     assertEquals(Type.Put.getCode(), offheapKV.getTypeByte());
     // change tags to handle both onheap and offheap stuff
-    List<Tag> resTags = TagUtil.asList(offheapKV.getTagsArray(), offheapKV.getTagsOffset(),
-      offheapKV.getTagsLength());
+    List<Tag> resTags = offheapKV.getTags();
     Tag tag1 = resTags.get(0);
     assertEquals(t1.getType(), tag1.getType());
-    assertEquals(TagUtil.getValueAsString(t1), TagUtil.getValueAsString(tag1));
+    assertEquals(Tag.getValueAsString(t1),
+      Tag.getValueAsString(tag1));
     Tag tag2 = resTags.get(1);
     assertEquals(tag2.getType(), tag2.getType());
-    assertEquals(TagUtil.getValueAsString(t2), TagUtil.getValueAsString(tag2));
-    Tag res = PrivateCellUtil.getTag(offheapKV, (byte) 2);
-    assertEquals(TagUtil.getValueAsString(t2), TagUtil.getValueAsString(tag2));
-    res = PrivateCellUtil.getTag(offheapKV, (byte) 3);
-    assertNull(res);
+    assertEquals(Tag.getValueAsString(t2),
+      Tag.getValueAsString(tag2));
+    Tag res = PrivateCellUtil.getTag(offheapKV, (byte) 2).get();
+    assertEquals(Tag.getValueAsString(t2),
+      Tag.getValueAsString(tag2));
+    assertFalse(PrivateCellUtil.getTag(offheapKV, (byte) 3).isPresent());
   }

   @Test
@Test @Test

View File

@@ -459,11 +459,11 @@ public class TestKeyValue extends TestCase {
     boolean meta1Ok = false, meta2Ok = false;
     for (Tag tag : tags) {
       if (tag.getType() == (byte) 1) {
-        if (Bytes.equals(TagUtil.cloneValue(tag), metaValue1)) {
+        if (Bytes.equals(Tag.cloneValue(tag), metaValue1)) {
           meta1Ok = true;
         }
       } else {
-        if (Bytes.equals(TagUtil.cloneValue(tag), metaValue2)) {
+        if (Bytes.equals(Tag.cloneValue(tag), metaValue2)) {
           meta2Ok = true;
         }
       }
@@ -476,12 +476,12 @@ public class TestKeyValue extends TestCase {
     Tag next = tagItr.next();
     assertEquals(10, next.getValueLength());
     assertEquals((byte) 1, next.getType());
-    Bytes.equals(TagUtil.cloneValue(next), metaValue1);
+    Bytes.equals(Tag.cloneValue(next), metaValue1);
     assertTrue(tagItr.hasNext());
     next = tagItr.next();
     assertEquals(10, next.getValueLength());
     assertEquals((byte) 2, next.getType());
-    Bytes.equals(TagUtil.cloneValue(next), metaValue2);
+    Bytes.equals(Tag.cloneValue(next), metaValue2);
     assertFalse(tagItr.hasNext());

     tagItr = PrivateCellUtil.tagsIterator(kv);
@@ -489,12 +489,12 @@ public class TestKeyValue extends TestCase {
     next = tagItr.next();
     assertEquals(10, next.getValueLength());
     assertEquals((byte) 1, next.getType());
-    Bytes.equals(TagUtil.cloneValue(next), metaValue1);
+    Bytes.equals(Tag.cloneValue(next), metaValue1);
     assertTrue(tagItr.hasNext());
     next = tagItr.next();
     assertEquals(10, next.getValueLength());
     assertEquals((byte) 2, next.getType());
-    Bytes.equals(TagUtil.cloneValue(next), metaValue2);
+    Bytes.equals(Tag.cloneValue(next), metaValue2);
     assertFalse(tagItr.hasNext());
   }
@@ -37,13 +37,13 @@ public class TestTagUtil {
     assertEquals(1, tags.size());
     Tag ttlTag = tags.get(0);
     assertEquals(TagType.TTL_TAG_TYPE, ttlTag.getType());
-    assertEquals(ttl, TagUtil.getValueAsLong(ttlTag));
+    assertEquals(ttl, Tag.getValueAsLong(ttlTag));
     // Already having a TTL tag in the list. So the call must remove the old tag
     long ttl2 = 30 * 1000;
     tags = TagUtil.carryForwardTTLTag(tags, ttl2);
     assertEquals(1, tags.size());
     ttlTag = tags.get(0);
     assertEquals(TagType.TTL_TAG_TYPE, ttlTag.getType());
-    assertEquals(ttl2, TagUtil.getValueAsLong(ttlTag));
+    assertEquals(ttl2, Tag.getValueAsLong(ttlTag));
   }
 }
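
The value-decoding helpers this hunk touches moved from TagUtil onto the Tag interface as static methods. A minimal sketch of a TTL tag round trip, using ArrayBackedTag and Bytes the same way the tests in this commit do (the class name is illustrative):

    import org.apache.hadoop.hbase.ArrayBackedTag;
    import org.apache.hadoop.hbase.Tag;
    import org.apache.hadoop.hbase.TagType;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TtlTagSketch {
      public static void main(String[] args) {
        // A TTL tag carries a long (milliseconds) as its value.
        Tag ttlTag = new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L));
        // Tag.getValueAsLong is the static helper that replaces TagUtil.getValueAsLong.
        System.out.println(Tag.getValueAsLong(ttlTag)); // 5000
      }
    }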
@@ -28,22 +28,21 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.List;

+import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.RawCell;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagUtil;
-import org.apache.hadoop.hbase.ArrayBackedTag;
+import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream;
+import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream;
-import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream;

 @Category({MiscTests.class, SmallTests.class})
 public class TestCellCodecWithTags {
@@ -79,36 +78,37 @@ public class TestCellCodecWithTags {
     assertTrue(decoder.advance());
     Cell c = decoder.current();
     assertTrue(CellUtil.equals(c, cell1));
-    List<Tag> tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+    List<Tag> tags = ((RawCell)c).getTags();
     assertEquals(2, tags.size());
     Tag tag = tags.get(0);
     assertEquals(1, tag.getType());
-    assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag)));
+    assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), Tag.cloneValue(tag)));
     tag = tags.get(1);
     assertEquals(2, tag.getType());
-    assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag)));
+    assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), Tag.cloneValue(tag)));
     assertTrue(decoder.advance());
     c = decoder.current();
     assertTrue(CellUtil.equals(c, cell2));
-    tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+    tags = ((RawCell)c).getTags();
     assertEquals(1, tags.size());
     tag = tags.get(0);
     assertEquals(1, tag.getType());
-    assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag)));
+    assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), Tag.cloneValue(tag)));
     assertTrue(decoder.advance());
     c = decoder.current();
     assertTrue(CellUtil.equals(c, cell3));
-    tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+    tags = ((RawCell)c).getTags();
     assertEquals(3, tags.size());
     tag = tags.get(0);
     assertEquals(2, tag.getType());
-    assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag)));
+    assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), Tag.cloneValue(tag)));
     tag = tags.get(1);
     assertEquals(2, tag.getType());
-    assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag)));
+    assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), Tag.cloneValue(tag)));
     tag = tags.get(2);
     assertEquals(1, tag.getType());
-    assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag)));
+    assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), Tag.cloneValue(tag)));
     assertFalse(decoder.advance());
     dis.close();
     assertEquals(offset, cis.getCount());
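
Instead of slicing raw arrays with TagUtil.asList(getTagsArray(), getTagsOffset(), getTagsLength()), callers now cast to RawCell and ask for the tag list, as this hunk shows. A sketch of the pattern as a standalone helper (the helper name is illustrative):

    import java.util.List;

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.RawCell;
    import org.apache.hadoop.hbase.Tag;

    public class TagListSketch {
      /** Reads a cell's tags without touching raw array offsets. */
      static List<Tag> tagsOf(Cell cell) {
        // Cells produced by the codecs in these tests implement RawCell, so the
        // cast holds in this context; it is not guaranteed for every Cell.
        return ((RawCell) cell).getTags();
      }
    }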
@@ -28,22 +28,21 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.List;

+import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.RawCell;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagUtil;
-import org.apache.hadoop.hbase.ArrayBackedTag;
+import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream;
+import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream;
-import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream;

 @Category({MiscTests.class, SmallTests.class})
 public class TestKeyValueCodecWithTags {
@@ -79,36 +78,37 @@ public class TestKeyValueCodecWithTags {
     assertTrue(decoder.advance());
     Cell c = decoder.current();
     assertTrue(CellUtil.equals(c, kv1));
-    List<Tag> tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+    List<Tag> tags = ((RawCell)c).getTags();
     assertEquals(2, tags.size());
     Tag tag = tags.get(0);
     assertEquals(1, tag.getType());
-    assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag)));
+    assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), Tag.cloneValue(tag)));
     tag = tags.get(1);
     assertEquals(2, tag.getType());
-    assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag)));
+    assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), Tag.cloneValue(tag)));
     assertTrue(decoder.advance());
     c = decoder.current();
     assertTrue(CellUtil.equals(c, kv2));
-    tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+    tags = ((RawCell)c).getTags();
     assertEquals(1, tags.size());
     tag = tags.get(0);
     assertEquals(1, tag.getType());
-    assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag)));
+    assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), Tag.cloneValue(tag)));
     assertTrue(decoder.advance());
     c = decoder.current();
     assertTrue(CellUtil.equals(c, kv3));
-    tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
+    tags = ((RawCell)c).getTags();
     assertEquals(3, tags.size());
     tag = tags.get(0);
     assertEquals(2, tag.getType());
-    assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag)));
+    assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), Tag.cloneValue(tag)));
     tag = tags.get(1);
     assertEquals(2, tag.getType());
-    assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag)));
+    assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), Tag.cloneValue(tag)));
     tag = tags.get(2);
     assertEquals(1, tag.getType());
-    assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag)));
+    assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), Tag.cloneValue(tag)));
     assertFalse(decoder.advance());
     dis.close();
     assertEquals(offset, cis.getCount());
@@ -53,6 +53,7 @@ import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
+import org.apache.hadoop.hbase.RawCell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -65,7 +66,6 @@ import org.apache.hadoop.hbase.PerformanceEvaluation;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
-import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -492,8 +492,7 @@ public class TestHFileOutputFormat2 {
       HFileScanner scanner = reader.getScanner(false, false, false);
       scanner.seekTo();
       Cell cell = scanner.getCell();
-      List<Tag> tagsFromCell = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(),
-        cell.getTagsLength());
+      List<Tag> tagsFromCell = ((RawCell)cell).getTags();
       assertTrue(tagsFromCell.size() > 0);
       for (Tag tag : tagsFromCell) {
         assertTrue(tag.getType() == TagType.TTL_TAG_TYPE);
@@ -24,6 +24,7 @@ import java.util.concurrent.ConcurrentMap;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.ExtendedCellBuilder;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.client.Connection;
@@ -103,4 +104,11 @@ public interface RegionCoprocessorEnvironment extends CoprocessorEnvironment<Reg
   // so we do not want to allow coprocessors to export metrics at the region level. We can allow
   // getMetricRegistryForTable() to allow coprocessors to track metrics per-table, per-regionserver.
   MetricRegistry getMetricRegistryForRegionServer();
+
+  /**
+   * Returns a CellBuilder so that coprocessors can build cells. These cells can also include tags.
+   * Note that this builder does not support updating seqId of the cells
+   * @return the ExtendedCellBuilder
+   */
+  ExtendedCellBuilder getCellBuilder();
 }
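
The new getCellBuilder() contract above is the coprocessor-facing way to assemble cells, tags included. A minimal sketch of an observer-side helper using it; the family, qualifier, value, and tag are placeholders, and the setter chain mirrors the builder calls visible elsewhere in this commit:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hbase.ArrayBackedTag;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.ExtendedCellBuilder;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.Tag;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CpCellBuilderSketch {
      /** Builds a Put-type cell carrying one tag, using the environment's builder. */
      static Cell buildTaggedCell(RegionCoprocessorEnvironment env, byte[] row) {
        List<Tag> tags = new ArrayList<>();
        tags.add(new ArrayBackedTag((byte) 1, Bytes.toBytes("cp-tag")));
        ExtendedCellBuilder builder = env.getCellBuilder();
        byte[] family = Bytes.toBytes("f");
        byte[] qualifier = Bytes.toBytes("q");
        byte[] value = Bytes.toBytes("v");
        return builder
            .setRow(row, 0, row.length)
            .setFamily(family, 0, family.length)
            .setQualifier(qualifier, 0, qualifier.length)
            .setTimestamp(System.currentTimeMillis())
            .setType(KeyValue.Type.Put.getCode())
            .setValue(value, 0, value.length)
            .setTags(Tag.fromList(tags)) // serialized tag bytes; seqId updates are rejected here
            .build();
      }
    }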
@@ -55,22 +55,20 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.RawCell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagUtil;
-import org.apache.hadoop.hbase.regionserver.HStoreFile;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
 import org.apache.hadoop.hbase.mob.MobUtils;
+import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;
 import org.apache.hadoop.hbase.util.BloomFilter;
 import org.apache.hadoop.hbase.util.BloomFilterFactory;
@@ -80,6 +78,8 @@ import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.HFileArchiveUtil;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;

 import com.codahale.metrics.ConsoleReporter;
 import com.codahale.metrics.Counter;
@@ -399,8 +399,7 @@ public class HFilePrettyPrinter extends Configured implements Tool {
             + Bytes.toStringBinary(cell.getValueArray(), cell.getValueOffset(),
               cell.getValueLength()));
         int i = 0;
-        List<Tag> tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(),
-          cell.getTagsLength());
+        List<Tag> tags = ((RawCell)cell).getTags();
         for (Tag tag : tags) {
           out.print(String.format(" T[%d]: %s", i++, tag.toString()));
         }
@@ -442,7 +441,7 @@ public class HFilePrettyPrinter extends Configured implements Tool {
           System.err.println("ERROR, wrong value format in mob reference cell "
             + CellUtil.getCellKeyAsString(cell));
         } else {
-          TableName tn = TableName.valueOf(TagUtil.cloneValue(tnTag));
+          TableName tn = TableName.valueOf(Tag.cloneValue(tnTag));
           String mobFileName = MobUtils.getMobFileName(cell);
           boolean exist = mobFileExists(fs, tn, mobFileName,
             Bytes.toString(CellUtil.cloneFamily(cell)), foundMobFiles, missingMobFiles);
@@ -27,6 +27,7 @@ import java.util.Calendar;
 import java.util.Collection;
 import java.util.Date;
 import java.util.List;
+import java.util.Optional;
 import java.util.UUID;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.RejectedExecutionException;
@@ -101,7 +102,7 @@ public final class MobUtils {
   static {
     List<Tag> tags = new ArrayList<>();
     tags.add(MobConstants.MOB_REF_TAG);
-    REF_DELETE_MARKER_TAG_BYTES = TagUtil.fromList(tags);
+    REF_DELETE_MARKER_TAG_BYTES = Tag.fromList(tags);
   }

   /**
@@ -175,8 +176,10 @@ public final class MobUtils {
    */
   public static boolean isMobReferenceCell(Cell cell) {
     if (cell.getTagsLength() > 0) {
-      Tag tag = PrivateCellUtil.getTag(cell, TagType.MOB_REFERENCE_TAG_TYPE);
-      return tag != null;
+      Optional<Tag> tag = PrivateCellUtil.getTag(cell, TagType.MOB_REFERENCE_TAG_TYPE);
+      if (tag.isPresent()) {
+        return true;
+      }
     }
     return false;
   }
@@ -188,7 +191,10 @@ public final class MobUtils {
    */
   public static Tag getTableNameTag(Cell cell) {
     if (cell.getTagsLength() > 0) {
-      return PrivateCellUtil.getTag(cell, TagType.MOB_TABLE_NAME_TAG_TYPE);
+      Optional<Tag> tag = PrivateCellUtil.getTag(cell, TagType.MOB_TABLE_NAME_TAG_TYPE);
+      if (tag.isPresent()) {
+        return tag.get();
+      }
     }
     return null;
   }
@@ -496,7 +502,7 @@ public final class MobUtils {
     // find the original mob files by this table name. For details please see cloning
     // snapshot for mob files.
     tags.add(tableNameTag);
-    return createMobRefCell(cell, fileName, TagUtil.fromList(tags));
+    return createMobRefCell(cell, fileName, Tag.fromList(tags));
   }

   public static Cell createMobRefCell(Cell cell, byte[] fileName, byte[] refCellTags) {
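
Tag.fromList (formerly TagUtil.fromList), used twice above, serializes a tag list into the single byte[] layout that setTags(...) and the mob ref-cell helpers consume. A small sketch (table name and class name are placeholders):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hbase.ArrayBackedTag;
    import org.apache.hadoop.hbase.Tag;
    import org.apache.hadoop.hbase.TagType;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TagSerializationSketch {
      public static void main(String[] args) {
        List<Tag> tags = new ArrayList<>();
        tags.add(new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, Bytes.toBytes("t1")));
        // One contiguous buffer holding every tag in the list, in serialized form.
        byte[] serialized = Tag.fromList(tags);
        System.out.println(serialized.length + " bytes of serialized tags");
      }
    }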
@@ -54,7 +54,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
-import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -84,14 +83,13 @@ import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
 import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
 import org.apache.hadoop.hbase.regionserver.StoreScanner;
 import org.apache.hadoop.hbase.security.EncryptionUtil;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.yetus.audience.InterfaceAudience;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;

 /**
  * An implementation of {@link MobCompactor} that compacts the mob files in partitions.
  */
@@ -133,7 +131,7 @@ public class PartitionedMobCompactor extends MobCompactor {
     tags.add(MobConstants.MOB_REF_TAG);
     Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, tableName.getName());
     tags.add(tableNameTag);
-    this.refCellTags = TagUtil.fromList(tags);
+    this.refCellTags = Tag.fromList(tags);
     cryptoContext = EncryptionUtil.createEncryptionContext(copyOfConf, column);
   }
@@ -42,7 +42,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
-import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -121,7 +120,7 @@ public class HMobStore extends HStore {
     Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE,
       getTableName().getName());
     tags.add(tableNameTag);
-    this.refCellTags = TagUtil.fromList(tags);
+    this.refCellTags = Tag.fromList(tags);
   }

   /**
@@ -332,7 +331,7 @@ public class HMobStore extends HStore {
     String fileName = MobUtils.getMobFileName(reference);
     Tag tableNameTag = MobUtils.getTableNameTag(reference);
     if (tableNameTag != null) {
-      String tableNameString = TagUtil.getValueAsString(tableNameTag);
+      String tableNameString = Tag.getValueAsString(tableNameTag);
       List<Path> locations = map.get(tableNameString);
       if (locations == null) {
         IdLock.Entry lockEntry = keyLock.getLockEntry(tableNameString.hashCode());
@@ -359,12 +358,15 @@ public class HMobStore extends HStore {
           + "qualifier,timestamp,type and tags but with an empty value to return.");
       result = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
           .setRow(reference.getRowArray(), reference.getRowOffset(), reference.getRowLength())
-          .setFamily(reference.getFamilyArray(), reference.getFamilyOffset(), reference.getFamilyLength())
-          .setQualifier(reference.getQualifierArray(), reference.getQualifierOffset(), reference.getQualifierLength())
+          .setFamily(reference.getFamilyArray(), reference.getFamilyOffset(),
+            reference.getFamilyLength())
+          .setQualifier(reference.getQualifierArray(),
+            reference.getQualifierOffset(), reference.getQualifierLength())
           .setTimestamp(reference.getTimestamp())
           .setType(reference.getTypeByte())
           .setValue(HConstants.EMPTY_BYTE_ARRAY)
-          .setTags(reference.getTagsArray(), reference.getTagsOffset(), reference.getTagsLength())
+          .setTags(reference.getTagsArray(), reference.getTagsOffset(),
+            reference.getTagsLength())
          .build();
     }
     return result;
@@ -7714,7 +7714,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
               .setTimestamp(Math.max(currentCell.getTimestamp() + 1, now))
               .setType(KeyValue.Type.Put.getCode())
               .setValue(newValue, 0, newValue.length)
-              .setTags(TagUtil.fromList(tags))
+              .setTags(Tag.fromList(tags))
               .build();
         } else {
           PrivateCellUtil.updateLatestStamp(delta, now);
@@ -38,8 +38,11 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.ExtendedCellBuilder;
+import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.ServerName;
@@ -80,6 +83,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTrack
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
 import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CoprocessorClassLoader;
 import org.apache.hadoop.hbase.util.Pair;
@@ -87,8 +91,6 @@ import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.yetus.audience.InterfaceAudience;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;

 /**
  * Implements the coprocessor environment and runtime support for coprocessors
  * loaded within a {@link Region}.
@@ -179,6 +181,13 @@ public class RegionCoprocessorHost
     public MetricRegistry getMetricRegistryForRegionServer() {
       return metricRegistry;
     }
+
+    @Override
+    public ExtendedCellBuilder getCellBuilder() {
+      // do not allow seqId update.
+      // We always do a DEEP_COPY only
+      return ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY, false);
+    }
   }

 /**
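
The environment implementation above pins down two policy decisions: cells built by coprocessors always deep-copy their inputs, and the second factory argument disables seqId updates on the resulting builder. A small sketch contrasting the two factory forms that appear in this commit (the class name is illustrative):

    import org.apache.hadoop.hbase.CellBuilderType;
    import org.apache.hadoop.hbase.ExtendedCellBuilder;
    import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;

    public class BuilderFactorySketch {
      public static void main(String[] args) {
        // Internal callers may use the single-argument form (seqId updates allowed).
        ExtendedCellBuilder internal =
            ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY);
        // The coprocessor host passes false, so handed-out builders reject seqId updates.
        ExtendedCellBuilder forCoprocessors =
            ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY, false);
        System.out.println(internal != forCoprocessors); // independent builder instances
      }
    }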
@@ -25,14 +25,12 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
-import org.apache.hadoop.hbase.TagUtil;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
@@ -43,6 +41,7 @@ import org.apache.hadoop.hbase.security.visibility.VisibilityNewVersionBehaivorT
 import org.apache.hadoop.hbase.security.visibility.VisibilityScanDeleteTracker;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * A query matcher that is specifically designed for the scan case.
@@ -157,7 +156,7 @@ public abstract class ScanQueryMatcher implements ShipperListener {
       // to convert
       long ts = cell.getTimestamp();
       assert t.getValueLength() == Bytes.SIZEOF_LONG;
-      long ttl = TagUtil.getValueAsLong(t);
+      long ttl = Tag.getValueAsLong(t);
       if (ts + ttl < now) {
         return true;
       }
@@ -18,13 +18,6 @@
 package org.apache.hadoop.hbase.security.access;

-import org.apache.hadoop.hbase.CompareOperator;
-import org.apache.hadoop.hbase.PrivateCellUtil;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
@@ -45,12 +38,12 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.AuthUtil;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
-import org.apache.hadoop.hbase.TagUtil;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Delete;
@@ -69,12 +62,16 @@ import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Maintains lists of permission grants to users and groups to allow for
@@ -752,7 +749,7 @@ public class AccessControlLists {
       if (tag.hasArray()) {
         ProtobufUtil.mergeFrom(builder, tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
       } else {
-        ProtobufUtil.mergeFrom(builder, TagUtil.cloneValue(tag));
+        ProtobufUtil.mergeFrom(builder, Tag.cloneValue(tag));
       }
       ListMultimap<String,Permission> kvPerms =
           AccessControlUtil.toUsersAndPermissions(builder.build());
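
The hasArray() branch above is the general pattern for reading a tag's value: on-heap tags expose a backing array plus offset/length, while ByteBuffer-backed tags need Tag.cloneValue to materialize a copy. A sketch of the same pattern as a standalone helper (the helper name is illustrative):

    import org.apache.hadoop.hbase.Tag;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TagValueSketch {
      /** Extracts a tag's value bytes, copying only what is needed. */
      static byte[] valueOf(Tag tag) {
        if (tag.hasArray()) {
          // On-heap: copy just the value range out of the backing array.
          return Bytes.copy(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
        }
        // Off-heap (ByteBuffer-backed): Tag.cloneValue handles the copy.
        return Tag.cloneValue(tag);
      }
    }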
@@ -52,10 +52,10 @@ import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
@@ -123,6 +123,13 @@ import org.apache.hadoop.hbase.security.Superusers;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.security.access.Permission.Action;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -133,14 +140,6 @@ import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
 import org.apache.yetus.audience.InterfaceAudience;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;

 /**
  * Provides basic authorization checks for data access and administrative
  * operations.
@@ -606,7 +606,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
     for (Tag tag : deleteVisTags) {
       matchFound = false;
       for (Tag givenTag : putVisTags) {
-        if (TagUtil.matchingValue(tag, givenTag)) {
+        if (Tag.matchingValue(tag, givenTag)) {
           matchFound = true;
           break;
         }
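
Tag.matchingValue (moved from TagUtil) compares only the value bytes of two tags, whatever backing they use, which is exactly what the visibility delete-matching loop above needs. A tiny sketch:

    import org.apache.hadoop.hbase.ArrayBackedTag;
    import org.apache.hadoop.hbase.Tag;
    import org.apache.hadoop.hbase.util.Bytes;

    public class MatchingValueSketch {
      public static void main(String[] args) {
        Tag a = new ArrayBackedTag((byte) 1, Bytes.toBytes("secret"));
        Tag b = new ArrayBackedTag((byte) 1, Bytes.toBytes("secret"));
        System.out.println(Tag.matchingValue(a, b)); // true: value bytes are equal
      }
    }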
@@ -18,16 +18,16 @@
 package org.apache.hadoop.hbase.security.visibility;

+import static org.apache.hadoop.hbase.HConstants.OperationStatusCode.SANITY_CHECK_FAILURE;
+import static org.apache.hadoop.hbase.HConstants.OperationStatusCode.SUCCESS;
+import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_FAMILY;
+import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME;
+
 import com.google.protobuf.ByteString;
 import com.google.protobuf.RpcCallback;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.Service;

-import static org.apache.hadoop.hbase.HConstants.OperationStatusCode.SANITY_CHECK_FAILURE;
-import static org.apache.hadoop.hbase.HConstants.OperationStatusCode.SUCCESS;
-import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_FAMILY;
-import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LABELS_TABLE_NAME;
-
 import java.io.IOException;
 import java.net.InetAddress;
 import java.util.ArrayList;
@@ -49,12 +49,11 @@ import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.MetaTableAccessor;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
-import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
@@ -109,14 +108,13 @@ import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.Superusers;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.access.AccessController;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
-
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;

 /**
  * Coprocessor that has both the MasterObserver and RegionObserver implemented that supports in
  * visibility labels
@@ -321,7 +319,7 @@ public class VisibilityController implements MasterCoprocessor, RegionCoprocesso
       Tag tag = pair.getSecond();
       if (cellVisibility == null && tag != null) {
         // May need to store only the first one
-        cellVisibility = new CellVisibility(TagUtil.getValueAsString(tag));
+        cellVisibility = new CellVisibility(Tag.getValueAsString(tag));
         modifiedTagFound = true;
       }
     }
@@ -39,11 +39,9 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
-import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -217,7 +215,7 @@ public class VisibilityUtils {
     while (tagsIterator.hasNext()) {
       Tag tag = tagsIterator.next();
       if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) {
-        serializationFormat = TagUtil.getValueAsByte(tag);
+        serializationFormat = Tag.getValueAsByte(tag);
       } else if (tag.getType() == VISIBILITY_TAG_TYPE) {
         tags.add(tag);
       }
@@ -244,7 +242,7 @@ public class VisibilityUtils {
     while (tagsIterator.hasNext()) {
       Tag tag = tagsIterator.next();
       if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) {
-        serializationFormat = TagUtil.getValueAsByte(tag);
+        serializationFormat = Tag.getValueAsByte(tag);
       } else if (tag.getType() == VISIBILITY_TAG_TYPE) {
         visTags.add(tag);
       } else {
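
Single-byte tag values, like the visibility serialization-format tag above, decode via Tag.getValueAsByte (formerly TagUtil.getValueAsByte). A tiny sketch (the payload byte is a placeholder):

    import org.apache.hadoop.hbase.ArrayBackedTag;
    import org.apache.hadoop.hbase.Tag;
    import org.apache.hadoop.hbase.TagType;

    public class ByteTagSketch {
      public static void main(String[] args) {
        Tag fmt = new ArrayBackedTag(TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE,
            new byte[] { (byte) 1 });
        System.out.println(Tag.getValueAsByte(fmt)); // 1
      }
    }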
@@ -27,7 +27,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;

-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.HelpFormatter;
@@ -43,12 +42,13 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagUtil;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.yetus.audience.InterfaceStability;
 import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+
+import com.fasterxml.jackson.databind.ObjectMapper;

 /**
  * WALPrettyPrinter prints the contents of a given WAL with a variety of
@@ -343,7 +343,8 @@ public class WALPrettyPrinter {
         Iterator<Tag> tagsIterator = PrivateCellUtil.tagsIterator(cell);
         while (tagsIterator.hasNext()) {
           Tag tag = tagsIterator.next();
-          tagsString.add((tag.getType()) + ":" + Bytes.toStringBinary(TagUtil.cloneValue(tag)));
+          tagsString
+              .add((tag.getType()) + ":" + Bytes.toStringBinary(Tag.cloneValue(tag)));
         }
         stringMap.put("tag", tagsString);
       }
@@ -17,9 +17,13 @@
  * limitations under the License.
  */
 package org.apache.hadoop.hbase.io.hfile;
+
+import org.apache.yetus.audience.InterfaceAudience;
+
 /**
  * Used in testcases only.
  */
+@InterfaceAudience.Private
 public enum TagUsage {
   // No tags would be added
   NO_TAG,
@@ -18,37 +18,35 @@
  */
 package org.apache.hadoop.hbase.io.hfile;

+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.List;

-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import org.junit.Before;
-import org.junit.Test;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.ArrayBackedTag;
+import org.apache.hadoop.hbase.ByteBufferKeyValue;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.ByteBufferKeyValue;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.Tag;
-import org.apache.hadoop.hbase.TagUtil;
-import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Before;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -176,7 +174,7 @@ public class TestSeekTo {
       Iterator<Tag> tagsIterator = PrivateCellUtil.tagsIterator(cell);
       while (tagsIterator.hasNext()) {
         Tag next = tagsIterator.next();
-        assertEquals("myTag1", Bytes.toString(TagUtil.cloneValue(next)));
+        assertEquals("myTag1", Bytes.toString(Tag.cloneValue(next)));
       }
     }
     assertTrue(scanner.seekBefore(toKV("k", tagUsage)));
@ -20,20 +20,22 @@ package org.apache.hadoop.hbase.protobuf;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import java.io.IOException; import java.io.IOException;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
@ -42,7 +44,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Col
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair;
import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
@ -333,7 +334,8 @@ public class TestProtobufUtil {
dbb.put(arr); dbb.put(arr);
ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, kv1.getLength(), kv2.getLength()); ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, kv1.getLength(), kv2.getLength());
CellProtos.Cell cell = ProtobufUtil.toCell(offheapKV); CellProtos.Cell cell = ProtobufUtil.toCell(offheapKV);
Cell newOffheapKV = ProtobufUtil.toCell(ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell); Cell newOffheapKV =
ProtobufUtil.toCell(ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell);
assertTrue(CellComparatorImpl.COMPARATOR.compare(offheapKV, newOffheapKV) == 0); assertTrue(CellComparatorImpl.COMPARATOR.compare(offheapKV, newOffheapKV) == 0);
} }
} }
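The hunk above captures the new decode contract: ProtobufUtil.toCell no longer materializes a cell on its own but fills a caller-supplied ExtendedCellBuilder. A minimal sketch of the same round trip, assuming only classes already imported in this test (the row/family/qualifier literals are placeholders):

    KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("value"));
    CellProtos.Cell proto = ProtobufUtil.toCell(kv);
    // SHALLOW_COPY tells the builder to wrap the byte arrays it is handed
    // rather than copying them.
    Cell decoded = ProtobufUtil.toCell(
        ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), proto);
    assertEquals(0, CellComparatorImpl.COMPARATOR.compare(kv, decoded));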

View File

@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileContext; import org.apache.hadoop.hbase.io.hfile.HFileContext;
@ -90,7 +89,7 @@ public class TestStoreFileScannerWithTagCompression {
kv.getRowLength())); kv.getRowLength()));
List<Tag> tags = KeyValueUtil.ensureKeyValue(kv).getTags(); List<Tag> tags = KeyValueUtil.ensureKeyValue(kv).getTags();
assertEquals(1, tags.size()); assertEquals(1, tags.size());
assertEquals("tag3", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); assertEquals("tag3", Bytes.toString(Tag.cloneValue(tags.get(0))));
} finally { } finally {
s.close(); s.close();
} }
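This is the pattern HBASE-19092 repeats across the test suite: the read-only TagUtil helpers move onto the Tag interface itself, so LimitedPrivate consumers no longer need the Private TagUtil class. A hedged sketch of the relocated statics (the tag type byte is an arbitrary example value):

    // Assumed in scope: org.apache.hadoop.hbase.ArrayBackedTag, Tag, util.Bytes
    Tag t = new ArrayBackedTag((byte) 65, Bytes.toBytes("tag3"));
    byte[] value = Tag.cloneValue(t);           // was TagUtil.cloneValue(t)
    String printable = Tag.getValueAsString(t); // was TagUtil.getValueAsString(t)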

View File

@ -26,6 +26,7 @@ import java.util.List;
import java.util.Optional; import java.util.Optional;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CellUtil;
@ -35,10 +36,9 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.RawCell;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.CompactionState; import org.apache.hadoop.hbase.client.CompactionState;
@ -56,10 +56,10 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver; import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.junit.After; import org.junit.After;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
@ -406,7 +406,7 @@ public class TestTags {
List<Tag> tags = TestCoprocessorForTags.tags; List<Tag> tags = TestCoprocessorForTags.tags;
assertEquals(3L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength())); assertEquals(3L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
assertEquals(1, tags.size()); assertEquals(1, tags.size());
assertEquals("tag1", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); assertEquals("tag1", Bytes.toString(Tag.cloneValue(tags.get(0))));
TestCoprocessorForTags.checkTagPresence = false; TestCoprocessorForTags.checkTagPresence = false;
TestCoprocessorForTags.tags = null; TestCoprocessorForTags.tags = null;
@ -424,7 +424,7 @@ public class TestTags {
// We cannot assume the ordering of tags // We cannot assume the ordering of tags
List<String> tagValues = new ArrayList<>(); List<String> tagValues = new ArrayList<>();
for (Tag tag: tags) { for (Tag tag: tags) {
tagValues.add(Bytes.toString(TagUtil.cloneValue(tag))); tagValues.add(Bytes.toString(Tag.cloneValue(tag)));
} }
assertTrue(tagValues.contains("tag1")); assertTrue(tagValues.contains("tag1"));
assertTrue(tagValues.contains("tag2")); assertTrue(tagValues.contains("tag2"));
@ -446,7 +446,7 @@ public class TestTags {
tags = TestCoprocessorForTags.tags; tags = TestCoprocessorForTags.tags;
assertEquals(4L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength())); assertEquals(4L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
assertEquals(1, tags.size()); assertEquals(1, tags.size());
assertEquals("tag2", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); assertEquals("tag2", Bytes.toString(Tag.cloneValue(tags.get(0))));
TestCoprocessorForTags.checkTagPresence = false; TestCoprocessorForTags.checkTagPresence = false;
TestCoprocessorForTags.tags = null; TestCoprocessorForTags.tags = null;
@ -465,7 +465,7 @@ public class TestTags {
kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q)); kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
tags = TestCoprocessorForTags.tags; tags = TestCoprocessorForTags.tags;
assertEquals(1, tags.size()); assertEquals(1, tags.size());
assertEquals("tag1", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); assertEquals("tag1", Bytes.toString(Tag.cloneValue(tags.get(0))));
TestCoprocessorForTags.checkTagPresence = false; TestCoprocessorForTags.checkTagPresence = false;
TestCoprocessorForTags.tags = null; TestCoprocessorForTags.tags = null;
@ -482,7 +482,7 @@ public class TestTags {
// We cannot assume the ordering of tags // We cannot assume the ordering of tags
tagValues.clear(); tagValues.clear();
for (Tag tag: tags) { for (Tag tag: tags) {
tagValues.add(Bytes.toString(TagUtil.cloneValue(tag))); tagValues.add(Bytes.toString(Tag.cloneValue(tag)));
} }
assertTrue(tagValues.contains("tag1")); assertTrue(tagValues.contains("tag1"));
assertTrue(tagValues.contains("tag2")); assertTrue(tagValues.contains("tag2"));
@ -503,7 +503,7 @@ public class TestTags {
kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q)); kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
tags = TestCoprocessorForTags.tags; tags = TestCoprocessorForTags.tags;
assertEquals(1, tags.size()); assertEquals(1, tags.size());
assertEquals("tag2", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); assertEquals("tag2", Bytes.toString(Tag.cloneValue(tags.get(0))));
} finally { } finally {
TestCoprocessorForTags.checkTagPresence = false; TestCoprocessorForTags.checkTagPresence = false;
TestCoprocessorForTags.tags = null; TestCoprocessorForTags.tags = null;
@ -613,8 +613,7 @@ public class TestTags {
CellScanner cellScanner = result.cellScanner(); CellScanner cellScanner = result.cellScanner();
if (cellScanner.advance()) { if (cellScanner.advance()) {
Cell cell = cellScanner.current(); Cell cell = cellScanner.current();
tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), tags = ((RawCell)cell).getTags();
cell.getTagsLength());
} }
} }
} }
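Reading a cell's tags no longer means TagUtil.asList plus array/offset/length bookkeeping; the cell is cast to RawCell and asked directly, as the last hunk shows. A sketch under the same assumption the test makes, namely that the scanned cell implements RawCell:

    Cell cell = cellScanner.current();
    List<Tag> tags = ((RawCell) cell).getTags();
    for (Tag tag : tags) {
      System.out.println(tag.getType() + ": " + Tag.getValueAsString(tag));
    }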

View File

@ -28,12 +28,11 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.ByteBufferKeyValue;
import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.codec.Codec.Decoder; import org.apache.hadoop.hbase.codec.Codec.Decoder;
import org.apache.hadoop.hbase.codec.Codec.Encoder; import org.apache.hadoop.hbase.codec.Codec.Encoder;
import org.apache.hadoop.hbase.io.util.LRUDictionary; import org.apache.hadoop.hbase.io.util.LRUDictionary;
@ -84,7 +83,7 @@ public class TestWALCellCodecWithCompression {
KeyValue kv = (KeyValue) decoder.current(); KeyValue kv = (KeyValue) decoder.current();
List<Tag> tags = kv.getTags(); List<Tag> tags = kv.getTags();
assertEquals(1, tags.size()); assertEquals(1, tags.size());
assertEquals("tagValue1", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); assertEquals("tagValue1", Bytes.toString(Tag.cloneValue(tags.get(0))));
decoder.advance(); decoder.advance();
kv = (KeyValue) decoder.current(); kv = (KeyValue) decoder.current();
tags = kv.getTags(); tags = kv.getTags();
@ -93,8 +92,8 @@ public class TestWALCellCodecWithCompression {
kv = (KeyValue) decoder.current(); kv = (KeyValue) decoder.current();
tags = kv.getTags(); tags = kv.getTags();
assertEquals(2, tags.size()); assertEquals(2, tags.size());
assertEquals("tagValue1", Bytes.toString(TagUtil.cloneValue(tags.get(0)))); assertEquals("tagValue1", Bytes.toString(Tag.cloneValue(tags.get(0))));
assertEquals("tagValue2", Bytes.toString(TagUtil.cloneValue(tags.get(1)))); assertEquals("tagValue2", Bytes.toString(Tag.cloneValue(tags.get(1))));
} }
private KeyValue createKV(int noOfTags) { private KeyValue createKV(int noOfTags) {

View File

@ -29,6 +29,7 @@ import java.util.Optional;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
@ -38,10 +39,9 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.RawCell;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.ConnectionFactory;
@ -57,10 +57,10 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver; import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.ReplicationTests; import org.apache.hadoop.hbase.testclassification.ReplicationTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
@ -252,7 +252,7 @@ public class TestReplicationWithTags {
// Check tag presence in the 1st cell in 1st Result // Check tag presence in the 1st cell in 1st Result
if (!results.isEmpty()) { if (!results.isEmpty()) {
Cell cell = results.get(0); Cell cell = results.get(0);
tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); tags = ((RawCell)cell).getTags();
} }
} }
} }

View File

@ -39,6 +39,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.ChoreService; import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ClusterId; import org.apache.hadoop.hbase.ClusterId;
import org.apache.hadoop.hbase.CoordinatedStateManager; import org.apache.hadoop.hbase.CoordinatedStateManager;
import org.apache.hadoop.hbase.ExtendedCellBuilder;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.Server;
@ -328,6 +329,11 @@ public class TestTokenAuthentication {
public Connection createConnection(Configuration conf) throws IOException { public Connection createConnection(Configuration conf) throws IOException {
return null; return null;
} }
@Override
public ExtendedCellBuilder getCellBuilder() {
return null;
}
}); });
started = true; started = true;
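The stubbed-out override above exists because HBASE-19092 adds getCellBuilder() to the environment contract, letting coprocessors assemble cells without touching Private implementations. A hedged sketch of what a real environment's builder enables; env, row, fam, qual, ts and value are hypothetical locals, and the fluent setter chain is assumed from the ExtendedCellBuilder API rather than taken from this commit:

    ExtendedCellBuilder builder = env.getCellBuilder();
    Cell cell = builder.setRow(row)
        .setFamily(fam)
        .setQualifier(qual)
        .setTimestamp(ts)
        .setType(KeyValue.Type.Put.getCode())
        .setValue(value)
        .build();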

View File

@ -35,16 +35,13 @@ import java.util.Set;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode; import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Delete;
@ -63,6 +60,7 @@ import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionN
import org.apache.hadoop.hbase.security.visibility.expression.Operator; import org.apache.hadoop.hbase.security.visibility.expression.Operator;
import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
/** /**
* This is a VisibilityLabelService where labels in Mutation's visibility * This is a VisibilityLabelService where labels in Mutation's visibility
@ -430,7 +428,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
for (Tag tag : deleteVisTags) { for (Tag tag : deleteVisTags) {
matchFound = false; matchFound = false;
for (Tag givenTag : putVisTags) { for (Tag givenTag : putVisTags) {
if (TagUtil.matchingValue(tag, givenTag)) { if (Tag.matchingValue(tag, givenTag)) {
matchFound = true; matchFound = true;
break; break;
} }
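Tag-to-tag value comparison likewise moves onto the interface. A minimal sketch (the visibility labels are illustrative):

    Tag a = new ArrayBackedTag(TagType.VISIBILITY_TAG_TYPE, Bytes.toBytes("secret"));
    Tag b = new ArrayBackedTag(TagType.VISIBILITY_TAG_TYPE, Bytes.toBytes("secret"));
    boolean same = Tag.matchingValue(a, b); // was TagUtil.matchingValue(a, b)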

View File

@ -37,7 +37,6 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.ConnectionFactory;
@ -181,7 +180,7 @@ public class TestVisibilityLabelReplicationWithExpAsString extends TestVisibilit
boolean foundNonVisTag = false; boolean foundNonVisTag = false;
for(Tag t : TestCoprocessorForTagsAtSink.tags) { for(Tag t : TestCoprocessorForTagsAtSink.tags) {
if(t.getType() == NON_VIS_TAG_TYPE) { if(t.getType() == NON_VIS_TAG_TYPE) {
assertEquals(TEMP, Bytes.toString(TagUtil.cloneValue(t))); assertEquals(TEMP, Bytes.toString(Tag.cloneValue(t)));
foundNonVisTag = true; foundNonVisTag = true;
break; break;
} }

View File

@ -32,6 +32,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CellUtil;
@ -40,14 +41,13 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.RawCell;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.ConnectionFactory;
@ -65,7 +65,6 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver; import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse; import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint; import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.User;
@ -73,6 +72,7 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.testclassification.SecurityTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL.Entry; import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher; import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.junit.Assert; import org.junit.Assert;
@ -285,11 +285,10 @@ public class TestVisibilityLabelsReplication {
for (Cell cell : cells) { for (Cell cell : cells) {
if ((Bytes.equals(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), row, 0, if ((Bytes.equals(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), row, 0,
row.length))) { row.length))) {
List<Tag> tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), List<Tag> tags = ((RawCell)cell).getTags();
cell.getTagsLength());
for (Tag tag : tags) { for (Tag tag : tags) {
if (tag.getType() == TagType.STRING_VIS_TAG_TYPE) { if (tag.getType() == TagType.STRING_VIS_TAG_TYPE) {
assertEquals(visTag, TagUtil.getValueAsString(tag)); assertEquals(visTag, Tag.getValueAsString(tag));
tagFound = true; tagFound = true;
break; break;
} }
@ -331,7 +330,7 @@ public class TestVisibilityLabelsReplication {
boolean foundNonVisTag = false; boolean foundNonVisTag = false;
for (Tag t : TestCoprocessorForTagsAtSink.tags) { for (Tag t : TestCoprocessorForTagsAtSink.tags) {
if (t.getType() == NON_VIS_TAG_TYPE) { if (t.getType() == NON_VIS_TAG_TYPE) {
assertEquals(TEMP, TagUtil.getValueAsString(t)); assertEquals(TEMP, Tag.getValueAsString(t));
foundNonVisTag = true; foundNonVisTag = true;
break; break;
} }
@ -443,7 +442,7 @@ public class TestVisibilityLabelsReplication {
// Check tag presence in the 1st cell in 1st Result // Check tag presence in the 1st cell in 1st Result
if (!results.isEmpty()) { if (!results.isEmpty()) {
Cell cell = results.get(0); Cell cell = results.get(0);
tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); tags = ((RawCell)cell).getTags();
} }
} }
} }

View File

@ -25,19 +25,18 @@ import static org.junit.Assert.fail;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.Locale; import java.util.Locale;
import java.util.Optional;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Scan;
@ -137,8 +136,8 @@ public class HFileTestUtil {
kv = MobUtils.createMobRefCell(kv, key, tableNameTag); kv = MobUtils.createMobRefCell(kv, key, tableNameTag);
// verify that the kv has the tag. // verify that the kv has the tag.
Tag t = PrivateCellUtil.getTag(kv, TagType.MOB_TABLE_NAME_TAG_TYPE); Optional<Tag> tag = PrivateCellUtil.getTag(kv, TagType.MOB_TABLE_NAME_TAG_TYPE);
if (t == null) { if (!tag.isPresent()) {
throw new IllegalStateException("Tag didn't stick to KV " + kv.toString()); throw new IllegalStateException("Tag didn't stick to KV " + kv.toString());
} }
} }
@ -161,12 +160,13 @@ public class HFileTestUtil {
ResultScanner s = table.getScanner(new Scan()); ResultScanner s = table.getScanner(new Scan());
for (Result r : s) { for (Result r : s) {
for (Cell c : r.listCells()) { for (Cell c : r.listCells()) {
Tag t = PrivateCellUtil.getTag(c, TagType.MOB_TABLE_NAME_TAG_TYPE); Optional<Tag> tag = PrivateCellUtil.getTag(c, TagType.MOB_TABLE_NAME_TAG_TYPE);
if (t == null) { if (!tag.isPresent()) {
fail(c.toString() + " has null tag"); fail(c.toString() + " has null tag");
continue; continue;
} }
byte[] tval = TagUtil.cloneValue(t); Tag t = tag.get();
byte[] tval = Tag.cloneValue(t);
assertArrayEquals(c.toString() + " has tag" + Bytes.toString(tval), assertArrayEquals(c.toString() + " has tag" + Bytes.toString(tval),
r.getRow(), tval); r.getRow(), tval);
} }
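Both call sites above also pick up the new PrivateCellUtil.getTag signature, which returns Optional<Tag> instead of a nullable Tag. The resulting shape, sketched from the hunks above:

    Optional<Tag> tag = PrivateCellUtil.getTag(cell, TagType.MOB_TABLE_NAME_TAG_TYPE);
    if (tag.isPresent()) {
      byte[] tval = Tag.cloneValue(tag.get());
      // compare tval against the expected row, as the test does
    }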

View File

@ -27,15 +27,14 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.commons.collections4.MapUtils; import org.apache.commons.collections4.MapUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Durability;
@ -47,7 +46,6 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations; import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Scan.ReadType; import org.apache.hadoop.hbase.client.Scan.ReadType;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.ParseFilter; import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.security.visibility.Authorizations; import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.security.visibility.CellVisibility; import org.apache.hadoop.hbase.security.visibility.CellVisibility;
@ -71,6 +69,7 @@ import org.apache.hadoop.hbase.thrift2.generated.TScan;
import org.apache.hadoop.hbase.thrift2.generated.TServerName; import org.apache.hadoop.hbase.thrift2.generated.TServerName;
import org.apache.hadoop.hbase.thrift2.generated.TTimeRange; import org.apache.hadoop.hbase.thrift2.generated.TTimeRange;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private @InterfaceAudience.Private
public class ThriftUtilities { public class ThriftUtilities {
@ -172,7 +171,7 @@ public class ThriftUtilities {
col.setTimestamp(kv.getTimestamp()); col.setTimestamp(kv.getTimestamp());
col.setValue(CellUtil.cloneValue(kv)); col.setValue(CellUtil.cloneValue(kv));
if (kv.getTagsLength() > 0) { if (kv.getTagsLength() > 0) {
col.setTags(PrivateCellUtil.getTagsArray(kv)); col.setTags(PrivateCellUtil.cloneTags(kv));
} }
columnValues.add(col); columnValues.add(col);
} }
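The rename from getTagsArray to cloneTags lines the tag copy up with HBase's other clone* cell helpers; the returned array is a copy of the cell's serialized tag block, so it works whether the cell is array- or ByteBuffer-backed. A one-line sketch of the replacement call:

    byte[] serializedTags = PrivateCellUtil.cloneTags(kv); // was PrivateCellUtil.getTagsArray(kv)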