HBASE-12593 Tags to work with ByteBuffer.

anoopsjohn 2016-01-06 21:28:06 +05:30
parent 893a54c3a4
commit a9b671b31f
62 changed files with 1083 additions and 581 deletions
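In short: Tag changes from a concrete, array-backed class into an interface with an on-heap implementation (ArrayBackedTag) and an off-heap one (OffheapTag), and the static helpers move to the new TagUtil class. A minimal sketch of the consumer-side pattern after this change, using only classes and methods introduced in the diffs below:

Tag t = new ArrayBackedTag((byte) 1, Bytes.toBytes("acl"));
if (t.hasArray()) {
  // On-heap tag: read straight from the backing array.
  String v = Bytes.toString(t.getValueArray(), t.getValueOffset(), t.getValueLength());
} else {
  // Off-heap tag (OffheapTag): go through the ByteBuffer accessors.
  ByteBuffer b = t.getValueByteBuffer();
}
byte[] copy = TagUtil.cloneValue(t); // helper that hides the hasArray() branching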

View File

@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@ -124,7 +125,7 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
* @param qualifier
* @param ts
* @param value
* @param tags - Specify the Tags as an Array {@link KeyValue.Tag}
* @param tags - Specify the Tags as an Array
* @return a KeyValue with this object's row key and the Put identifier.
*/
KeyValue createPutKeyValue(byte[] family, byte[] qualifier, long ts, byte[] value, Tag[] tags) {
@ -138,7 +139,7 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
* @return a KeyValue with this object's row key and the Put identifier.
*/
KeyValue createPutKeyValue(byte[] family, ByteBuffer qualifier, long ts, ByteBuffer value,
Tag[] tags) {
Tag[] tags) {
return new KeyValue(this.row, 0, this.row == null ? 0 : this.row.length,
family, 0, family == null ? 0 : family.length,
qualifier, ts, KeyValue.Type.Put, value, tags != null ? Arrays.asList(tags) : null);
@ -219,11 +220,11 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
c.getQualifierLength()));
stringMap.put("timestamp", c.getTimestamp());
stringMap.put("vlen", c.getValueLength());
List<Tag> tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
List<Tag> tags = CellUtil.getTags(c);
if (tags != null) {
List<String> tagsString = new ArrayList<String>();
for (Tag t : tags) {
tagsString.add((t.getType()) + ":" + Bytes.toStringBinary(t.getValue()));
tagsString.add((t.getType()) + ":" + Bytes.toStringBinary(TagUtil.cloneValue(t)));
}
stringMap.put("tag", tagsString);
}

View File

@ -53,6 +53,7 @@ import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Consistency;
@ -580,20 +581,17 @@ public final class ProtobufUtil {
if (qv.hasTimestamp()) {
ts = qv.getTimestamp();
}
byte[] tags;
byte[] allTagsBytes;
if (qv.hasTags()) {
tags = qv.getTags().toByteArray();
Object[] array = Tag.asList(tags, 0, (short)tags.length).toArray();
Tag[] tagArray = new Tag[array.length];
for(int i = 0; i< array.length; i++) {
tagArray[i] = (Tag)array[i];
}
allTagsBytes = qv.getTags().toByteArray();
if(qv.hasDeleteType()) {
byte[] qual = qv.hasQualifier() ? qv.getQualifier().toByteArray() : null;
put.add(new KeyValue(proto.getRow().toByteArray(), family, qual, ts,
fromDeleteType(qv.getDeleteType()), null, tags));
fromDeleteType(qv.getDeleteType()), null, allTagsBytes));
} else {
put.addImmutable(family, qualifier, ts, value, tagArray);
List<Tag> tags = TagUtil.asList(allTagsBytes, 0, (short)allTagsBytes.length);
Tag[] tagsArray = new Tag[tags.size()];
put.addImmutable(family, qualifier, ts, value, tags.toArray(tagsArray));
}
} else {
if(qv.hasDeleteType()) {

View File

@ -0,0 +1,143 @@
/**
* Copyright The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
* This is a {@link Tag} implementation in which the value is backed by an on-heap byte array.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ArrayBackedTag implements Tag {
private final byte type;// TODO extra type state needed?
private final byte[] bytes;
private int offset = 0;
private int length = 0;
/**
* The serialized form writes the length of each tag, followed by the type and
* then the actual tag bytes. So every time the length part is parsed, we need
* to skip one more byte (the type) to reach the actual tag value.
*/
public ArrayBackedTag(byte tagType, String tag) {
this(tagType, Bytes.toBytes(tag));
}
/**
* Format for a tag :
* {@code <length of tag - 2 bytes><type code - 1 byte><tag>} The tag length is serialized
* using 2 bytes only, but since it is treated as unsigned, the max tag length is
* (Short.MAX_VALUE * 2) + 1. This length includes the 1-byte type and the actual tag bytes.
*/
public ArrayBackedTag(byte tagType, byte[] tag) {
int tagLength = tag.length + TYPE_LENGTH_SIZE;
if (tagLength > MAX_TAG_LENGTH) {
throw new IllegalArgumentException(
"Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH);
}
length = TAG_LENGTH_SIZE + tagLength;
bytes = new byte[length];
int pos = Bytes.putAsShort(bytes, 0, tagLength);
pos = Bytes.putByte(bytes, pos, tagType);
Bytes.putBytes(bytes, pos, tag, 0, tag.length);
this.type = tagType;
}
/**
* Creates a Tag from the specified byte array and offset. Presumes
* <code>bytes</code> content starting at <code>offset</code> is formatted as
* a Tag blob.
* The bytes must include the tag length, tag type and actual tag bytes.
* @param offset offset to start of Tag
*/
public ArrayBackedTag(byte[] bytes, int offset) {
this(bytes, offset, getLength(bytes, offset));
}
private static int getLength(byte[] bytes, int offset) {
return TAG_LENGTH_SIZE + Bytes.readAsInt(bytes, offset, TAG_LENGTH_SIZE);
}
/**
* Creates a Tag from the specified byte array, starting at offset, and for length
* <code>length</code>. Presumes <code>bytes</code> content starting at <code>offset</code> is
* formatted as a Tag blob.
*/
public ArrayBackedTag(byte[] bytes, int offset, int length) {
if (length > MAX_TAG_LENGTH) {
throw new IllegalArgumentException(
"Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH);
}
this.bytes = bytes;
this.offset = offset;
this.length = length;
this.type = bytes[offset + TAG_LENGTH_SIZE];
}
/**
* @return The byte array backing this Tag.
*/
public byte[] getValueArray() {
return this.bytes;
}
/**
* @return the tag type
*/
public byte getType() {
return this.type;
}
/**
* @return Length of actual tag bytes within the backing array
*/
public int getValueLength() {
return this.length - INFRASTRUCTURE_SIZE;
}
/**
* @return Offset of actual tag bytes within the backing array
*/
public int getValueOffset() {
return this.offset + INFRASTRUCTURE_SIZE;
}
@Override
public boolean hasArray() {
return true;
}
@Override
public ByteBuffer getValueByteBuffer() {
return ByteBuffer.wrap(bytes);
}
@Override
public String toString() {
return "[Tag type : " + this.type + ", value : "
+ Bytes.toStringBinary(bytes, getValueOffset(), getValueLength()) + "]";
}
}
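To make the serialized layout concrete, here is a hypothetical walk-through of what the constructor above produces for new ArrayBackedTag((byte) 1, Bytes.toBytes("ab")), i.e. a 5-byte backing array:

// bytes[0..1] = 3         tag length as unsigned short: 1 type byte + 2 value bytes
// bytes[2]    = 1         the tag type
// bytes[3..4] = 'a', 'b'  the tag value
// getValueOffset() == 3   (offset + INFRASTRUCTURE_SIZE)
// getValueLength() == 2   (length - INFRASTRUCTURE_SIZE)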

View File

@ -19,11 +19,13 @@
package org.apache.hadoop.hbase;
import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
import java.io.DataOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;
@ -108,8 +110,8 @@ public final class CellUtil {
/**
* Returns tag value in a new byte array. If server-side, use
* {@link Tag#getBuffer()} with appropriate {@link Tag#getTagOffset()} and
* {@link Tag#getTagLength()} instead to save on allocations.
* {@link Tag#getValueArray()} with appropriate {@link Tag#getValueOffset()} and
* {@link Tag#getValueLength()} instead to save on allocations.
* @param cell
* @return tag value in a new byte array.
*/
@ -749,7 +751,10 @@ public final class CellUtil {
* @param offset
* @param length
* @return iterator for the tags
* @deprecated As of 2.0.0 and will be removed in 3.0.0
* Instead use {@link #tagsIterator(Cell)}
*/
@Deprecated
public static Iterator<Tag> tagsIterator(final byte[] tags, final int offset, final int length) {
return new Iterator<Tag>() {
private int pos = offset;
@ -764,7 +769,7 @@ public final class CellUtil {
public Tag next() {
if (hasNext()) {
int curTagLen = Bytes.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE);
Tag tag = new Tag(tags, pos, curTagLen + Tag.TAG_LENGTH_SIZE);
Tag tag = new ArrayBackedTag(tags, pos, curTagLen + TAG_LENGTH_SIZE);
this.pos += Bytes.SIZEOF_SHORT + curTagLen;
return tag;
}
@ -778,6 +783,115 @@ public final class CellUtil {
};
}
private static Iterator<Tag> tagsIterator(final ByteBuffer tags, final int offset,
final int length) {
return new Iterator<Tag>() {
private int pos = offset;
private int endOffset = offset + length - 1;
@Override
public boolean hasNext() {
return this.pos < endOffset;
}
@Override
public Tag next() {
if (hasNext()) {
int curTagLen = ByteBufferUtils.readAsInt(tags, this.pos, Tag.TAG_LENGTH_SIZE);
Tag tag = new OffheapTag(tags, pos, curTagLen + Tag.TAG_LENGTH_SIZE);
this.pos += Bytes.SIZEOF_SHORT + curTagLen;
return tag;
}
return null;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
private static final Iterator<Tag> EMPTY_TAGS_ITR = new Iterator<Tag>() {
@Override
public boolean hasNext() {
return false;
}
@Override
public Tag next() {
return null;
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
/**
* Util method to iterate through the tags in the given cell.
*
* @param cell The Cell over which tags iterator is needed.
* @return iterator for the tags
*/
public static Iterator<Tag> tagsIterator(final Cell cell) {
final int tagsLength = cell.getTagsLength();
// Save an object allocation where we can
if (tagsLength == 0) {
return EMPTY_TAGS_ITR;
}
if (cell instanceof ByteBufferedCell) {
return tagsIterator(((ByteBufferedCell) cell).getTagsByteBuffer(),
((ByteBufferedCell) cell).getTagsPosition(), tagsLength);
}
return tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), tagsLength);
}
/**
* @param cell The Cell
* @return Tags in the given Cell as a List
*/
public static List<Tag> getTags(Cell cell) {
List<Tag> tags = new ArrayList<Tag>();
Iterator<Tag> tagsItr = tagsIterator(cell);
while (tagsItr.hasNext()) {
tags.add(tagsItr.next());
}
return tags;
}
/**
* Retrieve Cell's first tag, matching the passed in type
*
* @param cell The Cell
* @param type Type of the Tag to retrieve
* @return null if there is no tag of the passed in tag type
*/
public static Tag getTag(Cell cell, byte type){
boolean bufferBacked = cell instanceof ByteBufferedCell;
int length = cell.getTagsLength();
int offset = bufferBacked? ((ByteBufferedCell)cell).getTagsPosition():cell.getTagsOffset();
int pos = offset;
while (pos < offset + length) {
int tagLen;
if (bufferBacked) {
ByteBuffer tagsBuffer = ((ByteBufferedCell)cell).getTagsByteBuffer();
tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE);
if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) {
return new OffheapTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE);
}
} else {
tagLen = Bytes.readAsInt(cell.getTagsArray(), pos, TAG_LENGTH_SIZE);
if (cell.getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
return new ArrayBackedTag(cell.getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE);
}
}
pos += TAG_LENGTH_SIZE + tagLen;
}
return null;
}
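// Usage sketch (illustrative, not part of the committed file): the helpers above
// give uniform tag access for on-heap and off-heap cells, e.g.
//   Iterator<Tag> itr = CellUtil.tagsIterator(cell); // OffheapTag or ArrayBackedTag as needed
//   Tag ref = CellUtil.getTag(cell, TagType.MOB_REFERENCE_TAG_TYPE); // null if absent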
/**
* Returns true if the first range start1...end1 overlaps with the second range
* start2...end2, assuming the byte arrays represent row keys

View File

@ -894,7 +894,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
int tagsLength = 0;
if (tags != null && tags.length > 0) {
for (Tag t: tags) {
tagsLength += t.getLength();
tagsLength += t.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
}
}
checkForTagsLength(tagsLength);
@ -928,7 +928,11 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
if (tagsLength > 0) {
pos = Bytes.putAsShort(buffer, pos, tagsLength);
for (Tag t : tags) {
pos = Bytes.putBytes(buffer, pos, t.getBuffer(), t.getOffset(), t.getLength());
int tlen = t.getValueLength();
pos = Bytes.putAsShort(buffer, pos, tlen + Tag.TYPE_LENGTH_SIZE);
pos = Bytes.putByte(buffer, pos, t.getType());
TagUtil.copyValueTo(t, buffer, pos);
pos += tlen;
}
}
return keyValueLength;
@ -1013,7 +1017,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
int tagsLength = 0;
if (tags != null && !tags.isEmpty()) {
for (Tag t : tags) {
tagsLength += t.getLength();
tagsLength += t.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
}
}
checkForTagsLength(tagsLength);
@ -1053,7 +1057,11 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
if (tagsLength > 0) {
pos = Bytes.putAsShort(bytes, pos, tagsLength);
for (Tag t : tags) {
pos = Bytes.putBytes(bytes, pos, t.getBuffer(), t.getOffset(), t.getLength());
int tlen = t.getValueLength();
pos = Bytes.putAsShort(bytes, pos, tlen + Tag.TYPE_LENGTH_SIZE);
pos = Bytes.putByte(bytes, pos, t.getType());
TagUtil.copyValueTo(t, bytes, pos);
pos += tlen;
}
}
return bytes;
@ -1176,7 +1184,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
if (tags != null) {
List<String> tagsString = new ArrayList<String>();
for (Tag t : tags) {
tagsString.add((t.getType()) + ":" +Bytes.toStringBinary(t.getValue()));
tagsString.add((t.getType()) + ":" + TagUtil.getValueAsString(t));
}
stringMap.put("tag", tagsString);
}
@ -1558,7 +1566,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
if (tagsLength == 0) {
return EMPTY_ARRAY_LIST;
}
return Tag.asList(getTagsArray(), getTagsOffset(), tagsLength);
return TagUtil.asList(getTagsArray(), getTagsOffset(), tagsLength);
}
/**
@ -2386,7 +2394,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
public static KeyValue cloneAndAddTags(Cell c, List<Tag> newTags) {
List<Tag> existingTags = null;
if(c.getTagsLength() > 0) {
existingTags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
existingTags = CellUtil.getTags(c);
existingTags.addAll(newTags);
} else {
existingTags = newTags;

View File

@ -0,0 +1,83 @@
/**
* Copyright The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
/**
* This is a {@link Tag} implementation in which the value is backed by an off-heap
* {@link java.nio.ByteBuffer}.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class OffheapTag implements Tag {
private ByteBuffer buffer;
private int offset, length;
private byte type;
public OffheapTag(ByteBuffer buffer, int offset, int length) {
this.buffer = buffer;
this.offset = offset;
this.length = length;
this.type = ByteBufferUtils.toByte(buffer, offset + TAG_LENGTH_SIZE);
}
@Override
public byte getType() {
return this.type;
}
@Override
public int getValueOffset() {
return this.offset + INFRASTRUCTURE_SIZE;
}
@Override
public int getValueLength() {
return this.length - INFRASTRUCTURE_SIZE;
}
@Override
public boolean hasArray() {
return false;
}
@Override
public byte[] getValueArray() {
throw new UnsupportedOperationException(
"Tag is backed by an off heap buffer. Use getValueByteBuffer()");
}
@Override
public ByteBuffer getValueByteBuffer() {
return this.buffer;
}
@Override
public String toString() {
return "[Tag type : " + this.type + ", value : "
+ ByteBufferUtils.toStringBinary(buffer, getValueOffset(), getValueLength()) + "]";
}
}
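A hypothetical construction showing the off-heap path end to end; serializedTag is assumed to hold one tag in the <length><type><value> format described above:

ByteBuffer buf = ByteBuffer.allocateDirect(serializedTag.length);
ByteBufferUtils.copyFromArrayToBuffer(buf, serializedTag, 0, serializedTag.length);
Tag t = new OffheapTag(buf, 0, serializedTag.length);
t.hasArray();          // false
TagUtil.cloneValue(t); // copies the value bytes out through ByteBufferUtils
t.getValueArray();     // throws UnsupportedOperationException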

View File

@ -19,201 +19,60 @@
*/
package org.apache.hadoop.hbase;
import java.util.ArrayList;
import java.util.List;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
* Tags are part of cells and helps to add metadata about the KVs.
* Metadata could be ACLs per cells, visibility labels, etc.
* Tags are part of cells and help to add metadata about them.
* Metadata could be ACLs, visibility labels, etc.
* <p>
* Each Tag has a type (one byte) and a value part. The max value length for a Tag is 65533.
* <p>
* See {@link TagType} for reserved tag types.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class Tag {
public interface Tag {
public final static int TYPE_LENGTH_SIZE = Bytes.SIZEOF_BYTE;
public final static int TAG_LENGTH_SIZE = Bytes.SIZEOF_SHORT;
public final static int INFRASTRUCTURE_SIZE = TYPE_LENGTH_SIZE + TAG_LENGTH_SIZE;
public static final int MAX_TAG_LENGTH = (2 * Short.MAX_VALUE) + 1 - TAG_LENGTH_SIZE;
private final byte type;
private final byte[] bytes;
private int offset = 0;
private int length = 0;
/**
* The special tag will write the length of each tag and that will be
* followed by the type and then the actual tag.
* So every time the length part is parsed we need to add + 1 byte to it to
* get the type and then get the actual tag.
*/
public Tag(byte tagType, String tag) {
this(tagType, Bytes.toBytes(tag));
}
/**
* Format for a tag :
* {@code <length of tag - 2 bytes><type code - 1 byte><tag>} tag length is serialized
* using 2 bytes only but as this will be unsigned, we can have max tag length of
* (Short.MAX_SIZE * 2) +1. It includes 1 byte type length and actual tag bytes length.
*/
public Tag(byte tagType, byte[] tag) {
int tagLength = tag.length + TYPE_LENGTH_SIZE;
if (tagLength > MAX_TAG_LENGTH) {
throw new IllegalArgumentException(
"Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH);
}
length = TAG_LENGTH_SIZE + tagLength;
bytes = new byte[length];
int pos = Bytes.putAsShort(bytes, 0, tagLength);
pos = Bytes.putByte(bytes, pos, tagType);
Bytes.putBytes(bytes, pos, tag, 0, tag.length);
this.type = tagType;
}
/**
* Creates a Tag from the specified byte array and offset. Presumes
* <code>bytes</code> content starting at <code>offset</code> is formatted as
* a Tag blob.
* The bytes to include the tag type, tag length and actual tag bytes.
* @param offset offset to start of Tag
*/
public Tag(byte[] bytes, int offset) {
this(bytes, offset, getLength(bytes, offset));
}
private static int getLength(byte[] bytes, int offset) {
return TAG_LENGTH_SIZE + Bytes.readAsInt(bytes, offset, TAG_LENGTH_SIZE);
}
/**
* Creates a Tag from the specified byte array, starting at offset, and for length
* <code>length</code>. Presumes <code>bytes</code> content starting at <code>offset</code> is
* formatted as a Tag blob.
*/
public Tag(byte[] bytes, int offset, int length) {
if (length > MAX_TAG_LENGTH) {
throw new IllegalArgumentException(
"Invalid tag data being passed. Its length can not exceed " + MAX_TAG_LENGTH);
}
this.bytes = bytes;
this.offset = offset;
this.length = length;
this.type = bytes[offset + TAG_LENGTH_SIZE];
}
/**
* @return The byte array backing this Tag.
*/
public byte[] getBuffer() {
return this.bytes;
}
/**
* @return the tag type
*/
public byte getType() {
return this.type;
}
byte getType();
/**
* @return Length of actual tag bytes within the backed buffer
* @return Offset of tag value within the backing buffer
*/
public int getTagLength() {
return this.length - INFRASTRUCTURE_SIZE;
}
int getValueOffset();
/**
* @return Offset of actual tag bytes within the backed buffer
* @return Length of tag value within the backing buffer
*/
public int getTagOffset() {
return this.offset + INFRASTRUCTURE_SIZE;
}
int getValueLength();
/**
* Returns tag value in a new byte array.
* Primarily for use client-side. If server-side, use
* {@link #getBuffer()} with appropriate {@link #getTagOffset()} and {@link #getTagLength()}
* instead to save on allocations.
* @return tag value in a new byte array.
* Tells whether or not this Tag is backed by a byte array.
* @return true when this Tag is backed by a byte array
*/
public byte[] getValue() {
int tagLength = getTagLength();
byte[] tag = new byte[tagLength];
Bytes.putBytes(tag, 0, bytes, getTagOffset(), tagLength);
return tag;
}
boolean hasArray();
/**
* Creates the list of tags from the byte array b. Expected that b is in the
* expected tag format
* @param b
* @param offset
* @param length
* @return List of tags
* @return The array containing the value bytes.
* @throws UnsupportedOperationException
* when {@link #hasArray()} returns false. Use {@link #getValueByteBuffer()} in such
* a situation
*/
public static List<Tag> asList(byte[] b, int offset, int length) {
List<Tag> tags = new ArrayList<Tag>();
int pos = offset;
while (pos < offset + length) {
int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE);
tags.add(new Tag(b, pos, tagLen + TAG_LENGTH_SIZE));
pos += TAG_LENGTH_SIZE + tagLen;
}
return tags;
}
byte[] getValueArray();
/**
* Write a list of tags into a byte array
* @param tags
* @return the serialized tag data as bytes
* @return The {@link java.nio.ByteBuffer} containing the value bytes.
*/
public static byte[] fromList(List<Tag> tags) {
int length = 0;
for (Tag tag: tags) {
length += tag.length;
}
byte[] b = new byte[length];
int pos = 0;
for (Tag tag: tags) {
System.arraycopy(tag.bytes, tag.offset, b, pos, tag.length);
pos += tag.length;
}
return b;
}
/**
* Retrieve the first tag from the tags byte array matching the passed in tag type
* @param b
* @param offset
* @param length
* @param type
* @return null if there is no tag of the passed in tag type
*/
public static Tag getTag(byte[] b, int offset, int length, byte type) {
int pos = offset;
while (pos < offset + length) {
int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE);
if(b[pos + TAG_LENGTH_SIZE] == type) {
return new Tag(b, pos, tagLen + TAG_LENGTH_SIZE);
}
pos += TAG_LENGTH_SIZE + tagLen;
}
return null;
}
/**
* Returns the total length of the entire tag entity
*/
int getLength() {
return this.length;
}
/**
* Returns the offset of the entire tag entity
*/
int getOffset() {
return this.offset;
}
ByteBuffer getValueByteBuffer();
}
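For reference, the removed concrete-class API maps onto the new interface and helpers roughly as follows (a summary of the diffs in this commit, not an exhaustive list):

// tag.getBuffer()                     -> tag.getValueArray()  (on-heap tags only)
// tag.getTagOffset()/getTagLength()   -> tag.getValueOffset()/getValueLength()
// tag.getValue()                      -> TagUtil.cloneValue(tag)
// Tag.asList(b, off, len)             -> TagUtil.asList(b, off, len)
// Tag.fromList(tags)                  -> TagUtil.fromList(tags)
// Tag.getTag(b, off, len, type)       -> CellUtil.getTag(cell, type)
// new Tag(type, value)                -> new ArrayBackedTag(type, value)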

View File

@ -0,0 +1,219 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.util.StreamUtils;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
@InterfaceAudience.Private
public final class TagUtil {
/**
* Private constructor to keep this class from being instantiated.
*/
private TagUtil(){}
/**
* Returns tag value in a new byte array.
* Primarily for use client-side. If server-side, use
* {@link Tag#getValueArray()} with appropriate {@link Tag#getValueOffset()}
* and {@link Tag#getValueLength()} instead to save on allocations.
*
* @param tag The Tag whose value to be returned
* @return tag value in a new byte array.
*/
public static byte[] cloneValue(Tag tag) {
int tagLength = tag.getValueLength();
byte[] tagArr = new byte[tagLength];
if (tag.hasArray()) {
Bytes.putBytes(tagArr, 0, tag.getValueArray(), tag.getValueOffset(), tagLength);
} else {
ByteBufferUtils.copyFromBufferToArray(tagArr, tag.getValueByteBuffer(), tag.getValueOffset(),
0, tagLength);
}
return tagArr;
}
/**
* Creates a list of tags from the given byte array, which is expected to be in the serialized tag format.
*
* @param b The byte array
* @param offset The offset in array where tag bytes begin
* @param length Total length of all tags bytes
* @return List of tags
*/
public static List<Tag> asList(byte[] b, int offset, int length) {
List<Tag> tags = new ArrayList<Tag>();
int pos = offset;
while (pos < offset + length) {
int tagLen = Bytes.readAsInt(b, pos, TAG_LENGTH_SIZE);
tags.add(new ArrayBackedTag(b, pos, tagLen + TAG_LENGTH_SIZE));
pos += TAG_LENGTH_SIZE + tagLen;
}
return tags;
}
/**
* Creates a list of tags from the given ByteBuffer, which is expected to be in the serialized tag format.
*
* @param b The ByteBuffer
* @param offset The offset in ByteBuffer where tag bytes begin
* @param length Total length of all tags bytes
* @return List of tags
*/
public static List<Tag> asList(ByteBuffer b, int offset, int length) {
List<Tag> tags = new ArrayList<Tag>();
int pos = offset;
while (pos < offset + length) {
int tagLen = ByteBufferUtils.readAsInt(b, pos, TAG_LENGTH_SIZE);
tags.add(new OffheapTag(b, pos, tagLen + TAG_LENGTH_SIZE));
pos += TAG_LENGTH_SIZE + tagLen;
}
return tags;
}
/**
* Write a list of tags into a byte array
*
* @param tags The list of tags
* @return the serialized tag data as bytes
*/
public static byte[] fromList(List<Tag> tags) {
if (tags.isEmpty()) {
return HConstants.EMPTY_BYTE_ARRAY;
}
int length = 0;
for (Tag tag : tags) {
length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
}
byte[] b = new byte[length];
int pos = 0;
int tlen;
for (Tag tag : tags) {
tlen = tag.getValueLength();
pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE);
pos = Bytes.putByte(b, pos, tag.getType());
if (tag.hasArray()) {
pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen);
} else {
ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(),
pos, tlen);
pos += tlen;
}
}
return b;
}
/**
* Converts the value bytes of the given tag into a long value
* @param tag The Tag
* @return value as long
*/
public static long getValueAsLong(Tag tag) {
if (tag.hasArray()) {
return Bytes.toLong(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
}
return ByteBufferUtils.toLong(tag.getValueByteBuffer(), tag.getValueOffset());
}
/**
* Converts the value bytes of the given tag into a byte value
* @param tag The Tag
* @return value as byte
*/
public static byte getValueAsByte(Tag tag) {
if (tag.hasArray()) {
return tag.getValueArray()[tag.getValueOffset()];
}
return ByteBufferUtils.toByte(tag.getValueByteBuffer(), tag.getValueOffset());
}
/**
* Converts the value bytes of the given tag into a String value
* @param tag The Tag
* @return value as String
*/
public static String getValueAsString(Tag tag){
if(tag.hasArray()){
return Bytes.toString(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
}
return Bytes.toString(cloneValue(tag));
}
/**
* Matches the value part of the given tags
* @param t1 Tag to match the value
* @param t2 Tag to match the value
* @return true if the values of both tags are the same.
*/
public static boolean matchingValue(Tag t1, Tag t2) {
if (t1.hasArray() && t2.hasArray()) {
return Bytes.equals(t1.getValueArray(), t1.getValueOffset(), t1.getValueLength(),
t2.getValueArray(), t2.getValueOffset(), t2.getValueLength());
}
if (t1.hasArray()) {
return ByteBufferUtils.equals(t2.getValueByteBuffer(), t2.getValueOffset(),
t2.getValueLength(), t1.getValueArray(), t1.getValueOffset(), t1.getValueLength());
}
if (t2.hasArray()) {
return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(),
t1.getValueLength(), t2.getValueArray(), t2.getValueOffset(), t2.getValueLength());
}
return ByteBufferUtils.equals(t1.getValueByteBuffer(), t1.getValueOffset(), t1.getValueLength(),
t2.getValueByteBuffer(), t2.getValueOffset(), t2.getValueLength());
}
/**
* Copies the tag's value bytes to the given byte array
* @param tag The Tag
* @param out The byte array where to copy the Tag value.
* @param offset The offset within 'out' array where to copy the Tag value.
*/
public static void copyValueTo(Tag tag, byte[] out, int offset) {
if (tag.hasArray()) {
Bytes.putBytes(out, offset, tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
} else {
ByteBufferUtils.copyFromBufferToArray(out, tag.getValueByteBuffer(), tag.getValueOffset(),
offset, tag.getValueLength());
}
}
/**
* Reads an int value stored as a VInt at tag's given offset.
* @param tag The Tag
* @param offset The offset where VInt bytes begin
* @return A pair of the int value and number of bytes taken to store VInt
* @throws IOException When varint is malformed and not able to be read correctly
*/
public static Pair<Integer, Integer> readVIntValuePart(Tag tag, int offset) throws IOException {
if (tag.hasArray()) {
return StreamUtils.readRawVarint32(tag.getValueArray(), offset);
}
return StreamUtils.readRawVarint32(tag.getValueByteBuffer(), offset);
}
}
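A short round-trip sketch tying the helpers together (tag types and values are illustrative):

List<Tag> in = new ArrayList<Tag>();
in.add(new ArrayBackedTag((byte) 1, Bytes.toBytes("v1")));
in.add(new ArrayBackedTag((byte) 2, Bytes.toBytes("v2")));
byte[] b = TagUtil.fromList(in);               // <len><type><value> per tag
List<Tag> out = TagUtil.asList(b, 0, b.length);
assert TagUtil.matchingValue(in.get(0), out.get(0));
assert "v2".equals(TagUtil.getValueAsString(out.get(1)));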

View File

@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.io.util;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.nio.ByteBuff;
@ -127,9 +128,10 @@ public class StreamUtils {
* Offset in the input array where varInt is available
* @return A pair of integers in which first value is the actual decoded varInt value and second
* value as the number of bytes taken by this varInt for its storage in the input array.
* @throws IOException
* @throws IOException When varint is malformed and not able to be read correctly
*/
public static Pair<Integer, Integer> readRawVarint32(byte[] input, int offset) throws IOException {
public static Pair<Integer, Integer> readRawVarint32(byte[] input, int offset)
throws IOException {
int newOffset = offset;
byte tmp = input[newOffset++];
if (tmp >= 0) {
@ -169,6 +171,47 @@ public class StreamUtils {
return new Pair<Integer, Integer>(result, newOffset - offset);
}
public static Pair<Integer, Integer> readRawVarint32(ByteBuffer input, int offset)
throws IOException {
int newOffset = offset;
byte tmp = input.get(newOffset++);
if (tmp >= 0) {
return new Pair<Integer, Integer>((int) tmp, newOffset - offset);
}
int result = tmp & 0x7f;
tmp = input.get(newOffset++);
if (tmp >= 0) {
result |= tmp << 7;
} else {
result |= (tmp & 0x7f) << 7;
tmp = input.get(newOffset++);
if (tmp >= 0) {
result |= tmp << 14;
} else {
result |= (tmp & 0x7f) << 14;
tmp = input.get(newOffset++);
if (tmp >= 0) {
result |= tmp << 21;
} else {
result |= (tmp & 0x7f) << 21;
tmp = input.get(newOffset++);
result |= tmp << 28;
if (tmp < 0) {
// Discard upper 32 bits.
for (int i = 0; i < 5; i++) {
tmp = input.get(newOffset++);
if (tmp >= 0) {
return new Pair<Integer, Integer>(result, newOffset - offset);
}
}
throw new IOException("Malformed varint");
}
}
}
}
return new Pair<Integer, Integer>(result, newOffset - offset);
}
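// Worked example (illustrative): for input bytes { (byte) 0xAC, 0x02 } at offset 0,
// the code above returns Pair(300, 2): 0xAC contributes its low 7 bits (44),
// 0x02 is shifted left by 7 (256), and 44 + 256 = 300, consuming 2 bytes.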
public static short toShort(byte hi, byte lo) {
short s = (short) (((hi & 0xFF) << 8) | (lo & 0xFF));
Preconditions.checkArgument(s >= 0);

View File

@ -750,6 +750,29 @@ public final class ByteBufferUtils {
}
}
/**
* Converts a ByteBuffer to an int value
*
* @param buf The ByteBuffer
* @param offset Offset to int value
* @param length Number of bytes used to store the int value.
* @return the int value
* @throws IllegalArgumentException
* if there's not enough bytes left in the buffer after the given offset
*/
public static int readAsInt(ByteBuffer buf, int offset, final int length) {
if (offset + length > buf.limit()) {
throw new IllegalArgumentException("offset (" + offset + ") + length (" + length
+ ") exceed the" + " limit of the buffer: " + buf.limit());
}
int n = 0;
for(int i = offset; i < (offset + length); i++) {
n <<= 8;
n ^= toByte(buf, i) & 0xFF;
}
return n;
}
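// Example (illustrative): for buffer contents { 0x01, 0x02 } with offset 0 and
// length 2, the big-endian accumulation above returns (0x01 << 8) ^ 0x02 = 258.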
/**
* Reads a long value at the given buffer's offset.
* @param buffer

View File

@ -24,6 +24,7 @@ import java.util.List;
import java.util.Map;
import java.util.Random;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
@ -280,8 +281,8 @@ public class RedundantKVGenerator {
}
if (useTags) {
result.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] { new Tag(
(byte) 1, "value1") }));
result.add(new KeyValue(row, family, qualifier, timestamp, value,
new Tag[] { new ArrayBackedTag((byte) 1, "value1") }));
} else {
result.add(new KeyValue(row, family, qualifier, timestamp, value));
}
@ -365,7 +366,7 @@ public class RedundantKVGenerator {
}
if (useTags) {
KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, value,
new Tag[] { new Tag((byte) 1, "value1") });
new Tag[] { new ArrayBackedTag((byte) 1, "value1") });
ByteBuffer offheapKVBB = ByteBuffer.allocateDirect(keyValue.getLength());
ByteBufferUtils.copyFromArrayToBuffer(offheapKVBB, keyValue.getBuffer(),
keyValue.getOffset(), keyValue.getLength());

View File

@ -442,7 +442,7 @@ public class TestKeyValue extends TestCase {
byte[] metaValue1 = Bytes.toBytes("metaValue1");
byte[] metaValue2 = Bytes.toBytes("metaValue2");
KeyValue kv = new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, new Tag[] {
new Tag((byte) 1, metaValue1), new Tag((byte) 2, metaValue2) });
new ArrayBackedTag((byte) 1, metaValue1), new ArrayBackedTag((byte) 2, metaValue2) });
assertTrue(kv.getTagsLength() > 0);
assertTrue(Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), row, 0,
row.length));
@ -458,44 +458,42 @@ public class TestKeyValue extends TestCase {
boolean meta1Ok = false, meta2Ok = false;
for (Tag tag : tags) {
if (tag.getType() == (byte) 1) {
if (Bytes.equals(tag.getValue(), metaValue1)) {
if (Bytes.equals(TagUtil.cloneValue(tag), metaValue1)) {
meta1Ok = true;
}
} else {
if (Bytes.equals(tag.getValue(), metaValue2)) {
if (Bytes.equals(TagUtil.cloneValue(tag), metaValue2)) {
meta2Ok = true;
}
}
}
assertTrue(meta1Ok);
assertTrue(meta2Ok);
Iterator<Tag> tagItr = CellUtil.tagsIterator(kv.getTagsArray(), kv.getTagsOffset(),
kv.getTagsLength());
Iterator<Tag> tagItr = CellUtil.tagsIterator(kv);
//Iterator<Tag> tagItr = kv.tagsIterator();
assertTrue(tagItr.hasNext());
Tag next = tagItr.next();
assertEquals(10, next.getTagLength());
assertEquals(10, next.getValueLength());
assertEquals((byte) 1, next.getType());
Bytes.equals(next.getValue(), metaValue1);
assertTrue(Bytes.equals(TagUtil.cloneValue(next), metaValue1));
assertTrue(tagItr.hasNext());
next = tagItr.next();
assertEquals(10, next.getTagLength());
assertEquals(10, next.getValueLength());
assertEquals((byte) 2, next.getType());
Bytes.equals(next.getValue(), metaValue2);
assertTrue(Bytes.equals(TagUtil.cloneValue(next), metaValue2));
assertFalse(tagItr.hasNext());
tagItr = CellUtil.tagsIterator(kv.getTagsArray(), kv.getTagsOffset(),
kv.getTagsLength());
tagItr = CellUtil.tagsIterator(kv);
assertTrue(tagItr.hasNext());
next = tagItr.next();
assertEquals(10, next.getTagLength());
assertEquals(10, next.getValueLength());
assertEquals((byte) 1, next.getType());
Bytes.equals(next.getValue(), metaValue1);
assertTrue(Bytes.equals(TagUtil.cloneValue(next), metaValue1));
assertTrue(tagItr.hasNext());
next = tagItr.next();
assertEquals(10, next.getTagLength());
assertEquals(10, next.getValueLength());
assertEquals((byte) 2, next.getType());
Bytes.equals(next.getValue(), metaValue2);
assertTrue(Bytes.equals(TagUtil.cloneValue(next), metaValue2));
assertFalse(tagItr.hasNext());
}

View File

@ -43,8 +43,8 @@ public class TestOffheapKeyValue {
private static final byte[] fam2 = Bytes.toBytes(FAM2);
private static final byte[] qual1 = Bytes.toBytes(QUAL1);
private static final byte[] qual2 = Bytes.toBytes(QUAL2);
private static final Tag t1 = new Tag((byte) 1, Bytes.toBytes("TAG1"));
private static final Tag t2 = new Tag((byte) 2, Bytes.toBytes("TAG2"));
private static final Tag t1 = new ArrayBackedTag((byte) 1, Bytes.toBytes("TAG1"));
private static final Tag t2 = new ArrayBackedTag((byte) 2, Bytes.toBytes("TAG2"));
private static final ArrayList<Tag> tags = new ArrayList<Tag>();
static {
tags.add(t1);
@ -158,17 +158,17 @@ public class TestOffheapKeyValue {
assertEquals(0L, offheapKV.getTimestamp());
assertEquals(Type.Put.getCode(), offheapKV.getTypeByte());
// change tags to handle both onheap and offheap stuff
List<Tag> resTags =
Tag.asList(offheapKV.getTagsArray(), offheapKV.getTagsOffset(), offheapKV.getTagsLength());
List<Tag> resTags = TagUtil.asList(offheapKV.getTagsArray(), offheapKV.getTagsOffset(),
offheapKV.getTagsLength());
Tag tag1 = resTags.get(0);
assertEquals(t1.getType(), tag1.getType());
assertEquals(Bytes.toString(t1.getValue()), Bytes.toString(getTagValue(tag1)));
assertEquals(TagUtil.getValueAsString(t1), TagUtil.getValueAsString(tag1));
Tag tag2 = resTags.get(1);
assertEquals(t2.getType(), tag2.getType());
assertEquals(Bytes.toString(t2.getValue()), Bytes.toString(getTagValue(tag2)));
Tag res = Tag.getTag(offheapKV.getTagsArray(), 0, offheapKV.getTagsLength(), (byte) 2);
assertEquals(Bytes.toString(t2.getValue()), Bytes.toString(getTagValue(tag2)));
res = Tag.getTag(offheapKV.getTagsArray(), 0, offheapKV.getTagsLength(), (byte) 3);
assertEquals(TagUtil.getValueAsString(t2), TagUtil.getValueAsString(tag2));
Tag res = CellUtil.getTag(offheapKV, (byte) 2);
assertEquals(TagUtil.getValueAsString(t2), TagUtil.getValueAsString(res));
res = CellUtil.getTag(offheapKV, (byte) 3);
assertNull(res);
}
@ -195,11 +195,4 @@ public class TestOffheapKeyValue {
assertEquals(0L, offheapKeyOnlyKV.getTimestamp());
assertEquals(Type.Put.getCode(), offheapKeyOnlyKV.getTypeByte());
}
// TODO : Can be moved to TagUtil
private static byte[] getTagValue(Tag tag) {
int tagLength = tag.getTagLength();
byte[] tagBytes = new byte[tagLength];
System.arraycopy(tag.getBuffer(), tag.getTagOffset(), tagBytes, 0, tagLength);
return tagBytes;
}
}

View File

@ -33,6 +33,8 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
@ -54,16 +56,16 @@ public class TestCellCodecWithTags {
Codec.Encoder encoder = codec.getEncoder(dos);
final Cell cell1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"),
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("1"), new Tag[] {
new Tag((byte) 1, Bytes.toBytes("teststring1")),
new Tag((byte) 2, Bytes.toBytes("teststring2")) });
new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring1")),
new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring2")) });
final Cell cell2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"),
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new Tag((byte) 1,
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new ArrayBackedTag((byte) 1,
Bytes.toBytes("teststring3")), });
final Cell cell3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"),
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3"), new Tag[] {
new Tag((byte) 2, Bytes.toBytes("teststring4")),
new Tag((byte) 2, Bytes.toBytes("teststring5")),
new Tag((byte) 1, Bytes.toBytes("teststring6")) });
new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring4")),
new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring5")),
new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring6")) });
encoder.write(cell1);
encoder.write(cell2);
@ -77,36 +79,36 @@ public class TestCellCodecWithTags {
assertTrue(decoder.advance());
Cell c = decoder.current();
assertTrue(CellUtil.equals(c, cell1));
List<Tag> tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
List<Tag> tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
assertEquals(2, tags.size());
Tag tag = tags.get(0);
assertEquals(1, tag.getType());
assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), tag.getValue()));
assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag)));
tag = tags.get(1);
assertEquals(2, tag.getType());
assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), tag.getValue()));
assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag)));
assertTrue(decoder.advance());
c = decoder.current();
assertTrue(CellUtil.equals(c, cell2));
tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
assertEquals(1, tags.size());
tag = tags.get(0);
assertEquals(1, tag.getType());
assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), tag.getValue()));
assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag)));
assertTrue(decoder.advance());
c = decoder.current();
assertTrue(CellUtil.equals(c, cell3));
tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
assertEquals(3, tags.size());
tag = tags.get(0);
assertEquals(2, tag.getType());
assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), tag.getValue()));
assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag)));
tag = tags.get(1);
assertEquals(2, tag.getType());
assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), tag.getValue()));
assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag)));
tag = tags.get(2);
assertEquals(1, tag.getType());
assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), tag.getValue()));
assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag)));
assertFalse(decoder.advance());
dis.close();
assertEquals(offset, cis.getCount());

View File

@ -33,6 +33,8 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
@ -54,16 +56,16 @@ public class TestKeyValueCodecWithTags {
Codec.Encoder encoder = codec.getEncoder(dos);
final KeyValue kv1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"),
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("1"), new Tag[] {
new Tag((byte) 1, Bytes.toBytes("teststring1")),
new Tag((byte) 2, Bytes.toBytes("teststring2")) });
new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring1")),
new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring2")) });
final KeyValue kv2 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"),
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new Tag((byte) 1,
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"), new Tag[] { new ArrayBackedTag((byte) 1,
Bytes.toBytes("teststring3")), });
final KeyValue kv3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"),
HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3"), new Tag[] {
new Tag((byte) 2, Bytes.toBytes("teststring4")),
new Tag((byte) 2, Bytes.toBytes("teststring5")),
new Tag((byte) 1, Bytes.toBytes("teststring6")) });
new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring4")),
new ArrayBackedTag((byte) 2, Bytes.toBytes("teststring5")),
new ArrayBackedTag((byte) 1, Bytes.toBytes("teststring6")) });
encoder.write(kv1);
encoder.write(kv2);
@ -77,36 +79,36 @@ public class TestKeyValueCodecWithTags {
assertTrue(decoder.advance());
Cell c = decoder.current();
assertTrue(CellUtil.equals(c, kv1));
List<Tag> tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
List<Tag> tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
assertEquals(2, tags.size());
Tag tag = tags.get(0);
assertEquals(1, tag.getType());
assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), tag.getValue()));
assertTrue(Bytes.equals(Bytes.toBytes("teststring1"), TagUtil.cloneValue(tag)));
tag = tags.get(1);
assertEquals(2, tag.getType());
assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), tag.getValue()));
assertTrue(Bytes.equals(Bytes.toBytes("teststring2"), TagUtil.cloneValue(tag)));
assertTrue(decoder.advance());
c = decoder.current();
assertTrue(CellUtil.equals(c, kv2));
tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
assertEquals(1, tags.size());
tag = tags.get(0);
assertEquals(1, tag.getType());
assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), tag.getValue()));
assertTrue(Bytes.equals(Bytes.toBytes("teststring3"), TagUtil.cloneValue(tag)));
assertTrue(decoder.advance());
c = decoder.current();
assertTrue(CellUtil.equals(c, kv3));
tags = Tag.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
tags = TagUtil.asList(c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
assertEquals(3, tags.size());
tag = tags.get(0);
assertEquals(2, tag.getType());
assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), tag.getValue()));
assertTrue(Bytes.equals(Bytes.toBytes("teststring4"), TagUtil.cloneValue(tag)));
tag = tags.get(1);
assertEquals(2, tag.getType());
assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), tag.getValue()));
assertTrue(Bytes.equals(Bytes.toBytes("teststring5"), TagUtil.cloneValue(tag)));
tag = tags.get(2);
assertEquals(1, tag.getType());
assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), tag.getValue()));
assertTrue(Bytes.equals(Bytes.toBytes("teststring6"), TagUtil.cloneValue(tag)));
assertFalse(decoder.advance());
dis.close();
assertEquals(offset, cis.getCount());

View File

@ -28,6 +28,7 @@ import java.util.List;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.io.util.LRUDictionary;
import org.apache.hadoop.hbase.nio.SingleByteBuff;
import org.apache.hadoop.hbase.testclassification.MiscTests;
@ -97,7 +98,7 @@ public class TestTagCompressionContext {
private KeyValue createKVWithTags(int noOfTags) {
List<Tag> tags = new ArrayList<Tag>();
for (int i = 0; i < noOfTags; i++) {
tags.add(new Tag((byte) i, "tagValue" + i));
tags.add(new ArrayBackedTag((byte) i, "tagValue" + i));
}
KeyValue kv = new KeyValue(ROW, CF, Q, 1234L, V, tags);
return kv;

View File

@ -22,6 +22,7 @@ import java.util.List;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Assert;
@ -65,7 +66,7 @@ public class TestByteRangeWithKVSerialization {
int kvCount = 1000000;
List<KeyValue> kvs = new ArrayList<KeyValue>(kvCount);
int totalSize = 0;
Tag[] tags = new Tag[] { new Tag((byte) 1, "tag1") };
Tag[] tags = new Tag[] { new ArrayBackedTag((byte) 1, "tag1") };
for (int i = 0; i < kvCount; i++) {
KeyValue kv = new KeyValue(Bytes.toBytes(i), FAMILY, QUALIFIER, i, VALUE, tags);
kv.setSequenceId(i);

View File

@ -23,6 +23,7 @@ import java.util.List;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData;
import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition;
@ -46,9 +47,9 @@ public class TestRowDataTrivialWithTags extends BaseTestRowData{
static List<KeyValue> d = Lists.newArrayList();
static {
List<Tag> tagList = new ArrayList<Tag>();
Tag t = new Tag((byte) 1, "visisbility");
Tag t = new ArrayBackedTag((byte) 1, "visisbility");
tagList.add(t);
t = new Tag((byte) 2, "ACL");
t = new ArrayBackedTag((byte) 2, "ACL");
tagList.add(t);
d.add(new KeyValue(rA, cf, cq0, ts, v0, tagList));
d.add(new KeyValue(rB, cf, cq0, ts, v0, tagList));

View File

@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@ -1124,7 +1125,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
byte[] tag = generateData(this.rand, TAG_LENGTH);
Tag[] tags = new Tag[noOfTags];
for (int n = 0; n < noOfTags; n++) {
Tag t = new Tag((byte) n, tag);
Tag t = new ArrayBackedTag((byte) n, tag);
tags[n] = t;
}
KeyValue kv = new KeyValue(row, FAMILY_NAME, QUALIFIER_NAME, HConstants.LATEST_TIMESTAMP,
@ -1195,7 +1196,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
byte[] tag = generateData(this.rand, TAG_LENGTH);
Tag[] tags = new Tag[noOfTags];
for (int n = 0; n < noOfTags; n++) {
Tag t = new Tag((byte) n, tag);
Tag t = new ArrayBackedTag((byte) n, tag);
tags[n] = t;
}
KeyValue kv = new KeyValue(row, FAMILY_NAME, QUALIFIER_NAME, HConstants.LATEST_TIMESTAMP,

View File

@ -59,10 +59,11 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
import org.apache.hadoop.hbase.mob.MobUtils;
@ -367,11 +368,10 @@ public class HFilePrettyPrinter extends Configured implements Tool {
+ Bytes.toStringBinary(cell.getValueArray(), cell.getValueOffset(),
cell.getValueLength()));
int i = 0;
List<Tag> tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(),
List<Tag> tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
for (Tag tag : tags) {
System.out.print(String.format(" T[%d]: %s", i++,
Bytes.toStringBinary(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength())));
System.out.print(String.format(" T[%d]: %s", i++, TagUtil.getValueAsString(tag)));
}
}
System.out.println();
@ -411,7 +411,7 @@ public class HFilePrettyPrinter extends Configured implements Tool {
System.err.println("ERROR, wrong value format in mob reference cell "
+ CellUtil.getCellKeyAsString(cell));
} else {
TableName tn = TableName.valueOf(tnTag.getValue());
TableName tn = TableName.valueOf(TagUtil.cloneValue(tnTag));
String mobFileName = MobUtils.getMobFileName(cell);
boolean exist = mobFileExists(fs, tn, mobFileName,
Bytes.toString(CellUtil.cloneFamily(cell)), foundMobFiles, missingMobFiles);

View File

@ -25,6 +25,7 @@ import java.util.Set;
import java.util.TreeSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValue;
@ -169,7 +170,7 @@ public class TextSortReducer extends
// Add TTL directly to the KV so we can vary them when packing more than one KV
// into puts
if (ttl > 0) {
tags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
}
for (int i = 0; i < parsed.getColumnCount(); i++) {
if (i == parser.getRowKeyColumnIndex() || i == parser.getTimestampKeyColumnIndex()

View File

@ -22,6 +22,7 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
@ -170,7 +171,7 @@ extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put>
// Add TTL directly to the KV so we can vary them when packing more than one KV
// into puts
if (ttl > 0) {
tags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttl)));
}
}
Put put = new Put(rowKey.copyBytes());

View File

@ -26,6 +26,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
@ -167,7 +168,8 @@ public class DefaultMobStoreCompactor extends DefaultCompactor {
byte[] fileName = null;
StoreFile.Writer mobFileWriter = null, delFileWriter = null;
long mobCells = 0, deleteMarkersCount = 0;
Tag tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, store.getTableName().getName());
Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE,
store.getTableName().getName());
long cellsCountCompactedToMob = 0, cellsCountCompactedFromMob = 0;
long cellsSizeCompactedToMob = 0, cellsSizeCompactedFromMob = 0;
try {

View File

@ -27,6 +27,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@ -166,8 +167,8 @@ public class DefaultMobStoreFlusher extends DefaultStoreFlusher {
// the relative path is mobFiles
byte[] fileName = Bytes.toBytes(mobFileWriter.getPath().getName());
try {
Tag tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, store.getTableName()
.getName());
Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE,
store.getTableName().getName());
List<Cell> cells = new ArrayList<Cell>();
boolean hasMore;
ScannerContext scannerContext =

View File

@ -18,6 +18,7 @@
*/
package org.apache.hadoop.hbase.mob;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
@ -66,7 +67,7 @@ public final class MobConstants {
public static final String MOB_CACHE_EVICT_PERIOD = "hbase.mob.cache.evict.period";
public static final String MOB_CACHE_EVICT_REMAIN_RATIO = "hbase.mob.cache.evict.remain.ratio";
public static final Tag MOB_REF_TAG = new Tag(TagType.MOB_REFERENCE_TAG_TYPE,
public static final Tag MOB_REF_TAG = new ArrayBackedTag(TagType.MOB_REFERENCE_TAG_TYPE,
HConstants.EMPTY_BYTE_ARRAY);
public static final float DEFAULT_EVICT_REMAIN_RATIO = 0.5f;

View File

@ -42,6 +42,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@ -122,8 +123,7 @@ public final class MobUtils {
*/
public static boolean isMobReferenceCell(Cell cell) {
if (cell.getTagsLength() > 0) {
Tag tag = Tag.getTag(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(),
TagType.MOB_REFERENCE_TAG_TYPE);
Tag tag = CellUtil.getTag(cell, TagType.MOB_REFERENCE_TAG_TYPE);
return tag != null;
}
return false;
@ -136,9 +136,7 @@ public final class MobUtils {
*/
public static Tag getTableNameTag(Cell cell) {
if (cell.getTagsLength() > 0) {
Tag tag = Tag.getTag(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength(),
TagType.MOB_TABLE_NAME_TAG_TYPE);
return tag;
return CellUtil.getTag(cell, TagType.MOB_TABLE_NAME_TAG_TYPE);
}
return null;
}
@ -438,7 +436,7 @@ public final class MobUtils {
// snapshot for mob files.
tags.add(tableNameTag);
// Add the existing tags.
tags.addAll(Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()));
tags.addAll(CellUtil.getTags(cell));
int valueLength = cell.getValueLength();
byte[] refValue = Bytes.add(Bytes.toBytes(valueLength), fileName);
KeyValue reference = new KeyValue(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),

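Above, MobUtils stops walking raw tag arrays and asks CellUtil for a tag by type, which works whether the tags sit in a byte[] or an off-heap ByteBuffer. A short sketch of both lookups as used in the hunk (the wrapper class is illustrative):

import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;

public class MobTagLookupSketch {
  // getTag scans the cell's tags for one type and returns null when absent.
  static boolean isMobReference(Cell cell) {
    return cell.getTagsLength() > 0
        && CellUtil.getTag(cell, TagType.MOB_REFERENCE_TAG_TYPE) != null;
  }

  // getTags materializes every tag, mirroring the snapshot path above.
  static List<Tag> allTags(Cell cell) {
    return CellUtil.getTags(cell);
  }
}
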
View File

@ -38,6 +38,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HColumnDescriptor;
@ -113,7 +114,7 @@ public class PartitionedMobCompactor extends MobCompactor {
Configuration copyOfConf = new Configuration(conf);
copyOfConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0f);
compactionCacheConfig = new CacheConfig(copyOfConf);
tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, tableName.getName());
tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, tableName.getName());
cryptoContext = EncryptionUtil.createEncryptionContext(copyOfConf, column);
}

View File

@ -25,6 +25,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@ -156,8 +157,8 @@ public class MemStoreWrapper {
scanner = snapshot.getScanner();
scanner.seek(KeyValueUtil.createFirstOnRow(HConstants.EMPTY_START_ROW));
cell = null;
Tag tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, Bytes.toBytes(this.table.getName()
.toString()));
Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE,
Bytes.toBytes(this.table.getName().toString()));
long updatedCount = 0;
while (null != (cell = scanner.next())) {
KeyValue reference = MobUtils.createMobRefKeyValue(cell, referenceValue, tableNameTag);

View File

@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
@ -338,8 +339,7 @@ public class HMobStore extends HStore {
String fileName = MobUtils.getMobFileName(reference);
Tag tableNameTag = MobUtils.getTableNameTag(reference);
if (tableNameTag != null) {
byte[] tableName = tableNameTag.getValue();
String tableNameString = Bytes.toString(tableName);
String tableNameString = TagUtil.getValueAsString(tableNameTag);
List<Path> locations = map.get(tableNameString);
if (locations == null) {
IdLock.Entry lockEntry = keyLock.getLockEntry(tableNameString.hashCode());
@ -347,7 +347,7 @@ public class HMobStore extends HStore {
locations = map.get(tableNameString);
if (locations == null) {
locations = new ArrayList<Path>(2);
TableName tn = TableName.valueOf(tableName);
TableName tn = TableName.valueOf(tableNameString);
locations.add(MobUtils.getMobFamilyPath(conf, tn, family.getNameAsString()));
locations.add(HFileArchiveUtil.getStoreArchivePath(conf, tn, MobUtils
.getMobRegionInfo(tn).getEncodedName(), family.getNameAsString()));

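The HMobStore hunk above reads the table name straight out of the tag with TagUtil.getValueAsString instead of cloning the value into a byte[] first. A sketch of the pattern under the same assumptions (class and method names are illustrative):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.mob.MobUtils;

public class MobTableNameSketch {
  // Resolves the owning table of a MOB reference cell, or null when the
  // cell carries no table-name tag.
  static TableName owningTable(Cell reference) {
    Tag tableNameTag = MobUtils.getTableNameTag(reference);
    if (tableNameTag == null) {
      return null;
    }
    // Decodes the value in place; no intermediate byte[] copy.
    return TableName.valueOf(TagUtil.getValueAsString(tableNameTag));
  }
}
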
View File

@ -69,6 +69,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellScanner;
@ -94,6 +95,7 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagRewriteCell;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.backup.HFileArchiver;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@ -3667,8 +3669,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
for (int i = 0; i < listSize; i++) {
Cell cell = cells.get(i);
List<Tag> newTags = new ArrayList<Tag>();
Iterator<Tag> tagIterator = CellUtil.tagsIterator(cell.getTagsArray(),
cell.getTagsOffset(), cell.getTagsLength());
Iterator<Tag> tagIterator = CellUtil.tagsIterator(cell);
// Carry forward existing tags
@ -3685,11 +3686,11 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
// above may change when there are more tag based features in core.
if (m.getTTL() != Long.MAX_VALUE) {
// Add a cell TTL tag
newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(m.getTTL())));
newTags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(m.getTTL())));
}
// Rewrite the cell with the updated set of tags
cells.set(i, new TagRewriteCell(cell, Tag.fromList(newTags)));
cells.set(i, new TagRewriteCell(cell, TagUtil.fromList(newTags)));
}
}
}
@ -7073,8 +7074,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
private static List<Tag> carryForwardTags(final Cell cell, final List<Tag> tags) {
if (cell.getTagsLength() <= 0) return tags;
List<Tag> newTags = tags == null? new ArrayList<Tag>(): /*Append Tags*/tags;
Iterator<Tag> i =
CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
Iterator<Tag> i = CellUtil.tagsIterator(cell);
while (i.hasNext()) newTags.add(i.next());
return newTags;
}
@ -7178,11 +7178,12 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
if (mutate.getTTL() != Long.MAX_VALUE) {
// Add the new TTL tag
newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(mutate.getTTL())));
newTags.add(
new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(mutate.getTTL())));
}
// Rebuild tags
byte[] tagBytes = Tag.fromList(newTags);
byte[] tagBytes = TagUtil.fromList(newTags);
// allocate an empty cell once
newCell = new KeyValue(row.length, cell.getFamilyLength(),
@ -7216,9 +7217,10 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
if (mutate.getTTL() != Long.MAX_VALUE) {
List<Tag> newTags = new ArrayList<Tag>(1);
newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(mutate.getTTL())));
newTags.add(
new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(mutate.getTTL())));
// Add the new TTL tag
newCell = new TagRewriteCell(cell, Tag.fromList(newTags));
newCell = new TagRewriteCell(cell, TagUtil.fromList(newTags));
} else {
newCell = cell;
}
@ -7439,7 +7441,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
// Add the TTL tag if the mutation carried one
if (mutation.getTTL() != Long.MAX_VALUE) {
newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(mutation.getTTL())));
newTags.add(
new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(mutation.getTTL())));
}
Cell newKV = new KeyValue(row, 0, row.length,

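The HRegion hunks above all follow one shape: iterate the cell's existing tags with CellUtil.tagsIterator(cell), append the new tags, flatten with TagUtil.fromList, and wrap the original cell in a TagRewriteCell. A condensed sketch of that shape (names are illustrative, not from the patch):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagRewriteCell;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class RewriteWithTtlSketch {
  static Cell withTtl(Cell cell, long ttlMillis) {
    List<Tag> newTags = new ArrayList<Tag>();
    // The iterator works for array-backed and buffer-backed cells alike.
    Iterator<Tag> it = CellUtil.tagsIterator(cell);
    while (it.hasNext()) {
      newTags.add(it.next());
    }
    newTags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(ttlMillis)));
    // fromList serializes the tags back into one flattened byte[].
    return new TagRewriteCell(cell, TagUtil.fromList(newTags));
  }
}
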
View File

@ -59,6 +59,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.conf.ConfigurationManager;
@ -1779,28 +1780,24 @@ public class HStore implements Store {
* @return true if the cell is expired
*/
static boolean isCellTTLExpired(final Cell cell, final long oldestTimestamp, final long now) {
// Do not create an Iterator or Tag objects unless the cell actually has tags.
if (cell.getTagsLength() > 0) {
// Look for a TTL tag first. Use it instead of the family setting if
// found. If a cell has multiple TTLs, resolve the conflict by using the
// first tag encountered.
Iterator<Tag> i = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
while (i.hasNext()) {
Tag t = i.next();
if (TagType.TTL_TAG_TYPE == t.getType()) {
// Unlike in schema cell TTLs are stored in milliseconds, no need
// to convert
long ts = cell.getTimestamp();
assert t.getTagLength() == Bytes.SIZEOF_LONG;
long ttl = Bytes.toLong(t.getBuffer(), t.getTagOffset(), t.getTagLength());
if (ts + ttl < now) {
return true;
}
// Per cell TTLs cannot extend lifetime beyond family settings, so
// fall through to check that
break;
// Look for a TTL tag first. Use it instead of the family setting if
// found. If a cell has multiple TTLs, resolve the conflict by using the
// first tag encountered.
Iterator<Tag> i = CellUtil.tagsIterator(cell);
while (i.hasNext()) {
Tag t = i.next();
if (TagType.TTL_TAG_TYPE == t.getType()) {
// Unlike in schema cell TTLs are stored in milliseconds, no need
// to convert
long ts = cell.getTimestamp();
assert t.getValueLength() == Bytes.SIZEOF_LONG;
long ttl = TagUtil.getValueAsLong(t);
if (ts + ttl < now) {
return true;
}
// Per cell TTLs cannot extend lifetime beyond family settings, so
// fall through to check that
break;
}
}
return false;

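isCellTTLExpired above now decodes the TTL through TagUtil.getValueAsLong, so the same code covers both backing stores. A sketch of the core check (the wrapper is illustrative, and simplified: the real method falls through to the family TTL afterwards):

import java.util.Iterator;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class CellTtlCheckSketch {
  // True if the cell's own TTL tag says it has expired at 'now'.
  static boolean expiredByCellTtl(Cell cell, long now) {
    Iterator<Tag> i = CellUtil.tagsIterator(cell);
    while (i.hasNext()) {
      Tag t = i.next();
      if (TagType.TTL_TAG_TYPE == t.getType()) {
        assert t.getValueLength() == Bytes.SIZEOF_LONG;
        // Cell TTLs are milliseconds; the first TTL tag encountered wins.
        return cell.getTimestamp() + TagUtil.getValueAsLong(t) < now;
      }
    }
    return false;
  }
}
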
View File

@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@ -658,8 +659,7 @@ public class AccessControlLists {
return null;
}
List<Permission> results = Lists.newArrayList();
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell);
while (tagsIterator.hasNext()) {
Tag tag = tagsIterator.next();
if (tag.getType() == ACL_TAG_TYPE) {
@ -668,7 +668,12 @@ public class AccessControlLists {
// use the builder
AccessControlProtos.UsersAndPermissions.Builder builder =
AccessControlProtos.UsersAndPermissions.newBuilder();
ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), tag.getTagLength());
if (tag.hasArray()) {
ProtobufUtil.mergeFrom(builder, tag.getValueArray(), tag.getValueOffset(),
tag.getValueLength());
} else {
ProtobufUtil.mergeFrom(builder, TagUtil.cloneValue(tag));
}
ListMultimap<String,Permission> kvPerms =
ProtobufUtil.toUsersAndPermissions(builder.build());
// Are there permissions for this user?

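The new branch above is the commit's central idiom for tag values: consume the backing array directly when Tag.hasArray() is true, otherwise pay for exactly one copy via TagUtil.cloneValue. A generic sketch of that idiom (the Consumer interface and class name are hypothetical):

import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;

public class TagValueAccessSketch {
  interface Consumer {
    void process(byte[] b, int off, int len);
  }

  static void read(Tag tag, Consumer consumer) {
    if (tag.hasArray()) {
      // Zero-copy path: hand over the backing array region directly.
      consumer.process(tag.getValueArray(), tag.getValueOffset(), tag.getValueLength());
    } else {
      // ByteBuffer-backed tag: materialize the value once.
      byte[] value = TagUtil.cloneValue(tag);
      consumer.process(value, 0, value.length);
    }
  }
}
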
View File

@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.security.access;
import java.io.IOException;
import java.net.InetAddress;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
@ -34,6 +35,7 @@ import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
@ -54,6 +56,7 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagRewriteCell;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
@ -882,15 +885,13 @@ public class AccessController extends BaseMasterAndRegionObserver
List<Cell> newCells = Lists.newArrayList();
for (Cell cell: e.getValue()) {
// Prepend the supplied perms in a new ACL tag to an update list of tags for the cell
List<Tag> tags = Lists.newArrayList(new Tag(AccessControlLists.ACL_TAG_TYPE, perms));
if (cell.getTagsLength() > 0) {
Iterator<Tag> tagIterator = CellUtil.tagsIterator(cell.getTagsArray(),
cell.getTagsOffset(), cell.getTagsLength());
while (tagIterator.hasNext()) {
tags.add(tagIterator.next());
}
List<Tag> tags = new ArrayList<Tag>();
tags.add(new ArrayBackedTag(AccessControlLists.ACL_TAG_TYPE, perms));
Iterator<Tag> tagIterator = CellUtil.tagsIterator(cell);
while (tagIterator.hasNext()) {
tags.add(tagIterator.next());
}
newCells.add(new TagRewriteCell(cell, Tag.fromList(tags)));
newCells.add(new TagRewriteCell(cell, TagUtil.fromList(tags)));
}
// This is supposed to be safe, won't CME
e.setValue(newCells);
@ -915,14 +916,10 @@ public class AccessController extends BaseMasterAndRegionObserver
return;
}
for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
Cell cell = cellScanner.current();
if (cell.getTagsLength() > 0) {
Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
while (tagsItr.hasNext()) {
if (tagsItr.next().getType() == AccessControlLists.ACL_TAG_TYPE) {
throw new AccessDeniedException("Mutation contains cell with reserved type tag");
}
Iterator<Tag> tagsItr = CellUtil.tagsIterator(cellScanner.current());
while (tagsItr.hasNext()) {
if (tagsItr.next().getType() == AccessControlLists.ACL_TAG_TYPE) {
throw new AccessDeniedException("Mutation contains cell with reserved type tag");
}
}
}
@ -1997,32 +1994,21 @@ public class AccessController extends BaseMasterAndRegionObserver
// Collect any ACLs from the old cell
List<Tag> tags = Lists.newArrayList();
List<Tag> aclTags = Lists.newArrayList();
ListMultimap<String,Permission> perms = ArrayListMultimap.create();
if (oldCell != null) {
// Save an object allocation where we can
if (oldCell.getTagsLength() > 0) {
Iterator<Tag> tagIterator = CellUtil.tagsIterator(oldCell.getTagsArray(),
oldCell.getTagsOffset(), oldCell.getTagsLength());
while (tagIterator.hasNext()) {
Tag tag = tagIterator.next();
if (tag.getType() != AccessControlLists.ACL_TAG_TYPE) {
// Not an ACL tag, just carry it through
if (LOG.isTraceEnabled()) {
LOG.trace("Carrying forward tag from " + oldCell + ": type " + tag.getType() +
" length " + tag.getTagLength());
}
tags.add(tag);
} else {
// Merge the perms from the older ACL into the current permission set
// TODO: The efficiency of this can be improved. Don't build just to unpack
// again, use the builder
AccessControlProtos.UsersAndPermissions.Builder builder =
AccessControlProtos.UsersAndPermissions.newBuilder();
ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), tag.getTagLength());
ListMultimap<String,Permission> kvPerms =
ProtobufUtil.toUsersAndPermissions(builder.build());
perms.putAll(kvPerms);
Iterator<Tag> tagIterator = CellUtil.tagsIterator(oldCell);
while (tagIterator.hasNext()) {
Tag tag = tagIterator.next();
if (tag.getType() != AccessControlLists.ACL_TAG_TYPE) {
// Not an ACL tag, just carry it through
if (LOG.isTraceEnabled()) {
LOG.trace("Carrying forward tag from " + oldCell + ": type " + tag.getType()
+ " length " + tag.getValueLength());
}
tags.add(tag);
} else {
aclTags.add(tag);
}
}
}
@ -2031,7 +2017,7 @@ public class AccessController extends BaseMasterAndRegionObserver
byte[] aclBytes = mutation.getACL();
if (aclBytes != null) {
// Yes, use it
tags.add(new Tag(AccessControlLists.ACL_TAG_TYPE, aclBytes));
tags.add(new ArrayBackedTag(AccessControlLists.ACL_TAG_TYPE, aclBytes));
} else {
// No, use what we carried forward
if (perms != null) {
@ -2041,8 +2027,7 @@ public class AccessController extends BaseMasterAndRegionObserver
if (LOG.isTraceEnabled()) {
LOG.trace("Carrying forward ACLs from " + oldCell + ": " + perms);
}
tags.add(new Tag(AccessControlLists.ACL_TAG_TYPE,
ProtobufUtil.toUsersAndPermissions(perms).toByteArray()));
tags.addAll(aclTags);
}
}
@ -2051,7 +2036,7 @@ public class AccessController extends BaseMasterAndRegionObserver
return newCell;
}
Cell rewriteCell = new TagRewriteCell(newCell, Tag.fromList(tags));
Cell rewriteCell = new TagRewriteCell(newCell, TagUtil.fromList(tags));
return rewriteCell;
}

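The mutation-screening hunk above drops the getTagsLength() guard because the cell-level tags iterator handles the empty case itself. A sketch of the reserved-tag check as rewritten (the wrapper class is illustrative):

import java.util.Iterator;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.access.AccessControlLists;

public class ReservedTagCheckSketch {
  // Rejects any client-supplied cell that already carries an ACL tag.
  static void rejectReservedTags(Cell cell) throws AccessDeniedException {
    Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell);
    while (tagsItr.hasNext()) {
      if (tagsItr.next().getType() == AccessControlLists.ACL_TAG_TYPE) {
        throw new AccessDeniedException("Mutation contains cell with reserved type tag");
      }
    }
  }
}
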
View File

@ -42,6 +42,7 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
@ -49,6 +50,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
@ -90,7 +92,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
} catch (IOException e) {
// We write to a byte array. No Exception can happen.
}
LABELS_TABLE_TAGS[0] = new Tag(VISIBILITY_TAG_TYPE, baos.toByteArray());
LABELS_TABLE_TAGS[0] = new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray());
}
public DefaultVisibilityLabelServiceImpl() {
@ -481,42 +483,37 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
@Override
public boolean evaluate(Cell cell) throws IOException {
boolean visibilityTagPresent = false;
// Save an object allocation where we can
if (cell.getTagsLength() > 0) {
Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
while (tagsItr.hasNext()) {
boolean includeKV = true;
Tag tag = tagsItr.next();
if (tag.getType() == VISIBILITY_TAG_TYPE) {
visibilityTagPresent = true;
int offset = tag.getTagOffset();
int endOffset = offset + tag.getTagLength();
while (offset < endOffset) {
Pair<Integer, Integer> result = StreamUtils
.readRawVarint32(tag.getBuffer(), offset);
int currLabelOrdinal = result.getFirst();
if (currLabelOrdinal < 0) {
// check for the absence of this label in the Scan Auth labels
// ie. to check BitSet corresponding bit is 0
int temp = -currLabelOrdinal;
if (bs.get(temp)) {
includeKV = false;
break;
}
} else {
if (!bs.get(currLabelOrdinal)) {
includeKV = false;
break;
}
Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell);
while (tagsItr.hasNext()) {
boolean includeKV = true;
Tag tag = tagsItr.next();
if (tag.getType() == VISIBILITY_TAG_TYPE) {
visibilityTagPresent = true;
int offset = tag.getValueOffset();
int endOffset = offset + tag.getValueLength();
while (offset < endOffset) {
Pair<Integer, Integer> result = TagUtil.readVIntValuePart(tag, offset);
int currLabelOrdinal = result.getFirst();
if (currLabelOrdinal < 0) {
// check for the absence of this label in the Scan Auth labels
// i.e. check that the corresponding bit in the BitSet is 0
int temp = -currLabelOrdinal;
if (bs.get(temp)) {
includeKV = false;
break;
}
} else {
if (!bs.get(currLabelOrdinal)) {
includeKV = false;
break;
}
offset += result.getSecond();
}
if (includeKV) {
// We got one visibility expression getting evaluated to true. Good to include this
// KV in the result then.
return true;
}
offset += result.getSecond();
}
if (includeKV) {
// We got one visibility expression getting evaluated to true. Good to include this
// KV in the result then.
return true;
}
}
}
@ -596,8 +593,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
for (Tag tag : deleteVisTags) {
matchFound = false;
for (Tag givenTag : putVisTags) {
if (Bytes.equals(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength(),
givenTag.getBuffer(), givenTag.getTagOffset(), givenTag.getTagLength())) {
if (TagUtil.matchingValue(tag, givenTag)) {
matchFound = true;
break;
}
@ -621,10 +617,10 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
private static void getSortedTagOrdinals(List<List<Integer>> fullTagsList, Tag tag)
throws IOException {
List<Integer> tagsOrdinalInSortedOrder = new ArrayList<Integer>();
int offset = tag.getTagOffset();
int endOffset = offset + tag.getTagLength();
int offset = tag.getValueOffset();
int endOffset = offset + tag.getValueLength();
while (offset < endOffset) {
Pair<Integer, Integer> result = StreamUtils.readRawVarint32(tag.getBuffer(), offset);
Pair<Integer, Integer> result = TagUtil.readVIntValuePart(tag, offset);
tagsOrdinalInSortedOrder.add(result.getFirst());
offset += result.getSecond();
}
@ -678,11 +674,11 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
visibilityString.append(VisibilityConstants.CLOSED_PARAN).append(
VisibilityConstants.OR_OPERATOR);
}
int offset = tag.getTagOffset();
int endOffset = offset + tag.getTagLength();
int offset = tag.getValueOffset();
int endOffset = offset + tag.getValueLength();
boolean expressionStart = true;
while (offset < endOffset) {
Pair<Integer, Integer> result = StreamUtils.readRawVarint32(tag.getBuffer(), offset);
Pair<Integer, Integer> result = TagUtil.readVIntValuePart(tag, offset);
int currLabelOrdinal = result.getFirst();
if (currLabelOrdinal < 0) {
int temp = -currLabelOrdinal;

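Several hunks above decode a visibility tag's value as a run of vints through TagUtil.readVIntValuePart instead of StreamUtils against tag.getBuffer(). A sketch of the decoding loop they share (class and method names are illustrative; negative ordinals encode NOT-ed labels, as in the evaluate() hunk):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.util.Pair;

public class LabelOrdinalsSketch {
  static List<Integer> decode(Tag tag) throws IOException {
    List<Integer> ordinals = new ArrayList<Integer>();
    int offset = tag.getValueOffset();
    int endOffset = offset + tag.getValueLength();
    while (offset < endOffset) {
      Pair<Integer, Integer> result = TagUtil.readVIntValuePart(tag, offset);
      ordinals.add(result.getFirst());   // the label ordinal (negative = NOT)
      offset += result.getSecond();      // advance by the vint's width
    }
    return ordinals;
  }
}
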
View File

@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagRewriteCell;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
@ -340,8 +341,7 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements
Tag tag = pair.getSecond();
if (cellVisibility == null && tag != null) {
// May need to store only the first one
cellVisibility = new CellVisibility(Bytes.toString(tag.getBuffer(), tag.getTagOffset(),
tag.getTagLength()));
cellVisibility = new CellVisibility(TagUtil.getValueAsString(tag));
modifiedTagFound = true;
}
}
@ -368,14 +368,13 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements
List<Cell> updatedCells = new ArrayList<Cell>();
for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
Cell cell = cellScanner.current();
List<Tag> tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
List<Tag> tags = CellUtil.getTags(cell);
if (modifiedTagFound) {
// Rewrite the tags by removing the modified tags.
removeReplicationVisibilityTag(tags);
}
tags.addAll(visibilityTags);
Cell updatedCell = new TagRewriteCell(cell, Tag.fromList(tags));
Cell updatedCell = new TagRewriteCell(cell, TagUtil.fromList(tags));
updatedCells.add(updatedCell);
}
m.getFamilyCellMap().clear();
@ -472,28 +471,22 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements
// cell visibility tags
// have been modified
Tag modifiedTag = null;
if (cell.getTagsLength() > 0) {
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(),
cell.getTagsOffset(), cell.getTagsLength());
while (tagsIterator.hasNext()) {
Tag tag = tagsIterator.next();
if (tag.getType() == TagType.STRING_VIS_TAG_TYPE) {
modifiedTag = tag;
break;
}
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell);
while (tagsIterator.hasNext()) {
Tag tag = tagsIterator.next();
if (tag.getType() == TagType.STRING_VIS_TAG_TYPE) {
modifiedTag = tag;
break;
}
}
pair.setFirst(true);
pair.setSecond(modifiedTag);
return pair;
}
if (cell.getTagsLength() > 0) {
Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
while (tagsItr.hasNext()) {
if (RESERVED_VIS_TAG_TYPES.contains(tagsItr.next().getType())) {
return pair;
}
Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell);
while (tagsItr.hasNext()) {
if (RESERVED_VIS_TAG_TYPES.contains(tagsItr.next().getType())) {
return pair;
}
}
pair.setFirst(true);
@ -520,13 +513,10 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements
if (isSystemOrSuperUser()) {
return true;
}
if (cell.getTagsLength() > 0) {
Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
while (tagsItr.hasNext()) {
if (RESERVED_VIS_TAG_TYPES.contains(tagsItr.next().getType())) {
return false;
}
Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell);
while (tagsItr.hasNext()) {
if (RESERVED_VIS_TAG_TYPES.contains(tagsItr.next().getType())) {
return false;
}
}
return true;
@ -739,21 +729,17 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements
boolean authCheck = authorizationEnabled && checkAuths && !(isSystemOrSuperUser());
tags.addAll(this.visibilityLabelService.createVisibilityExpTags(cellVisibility.getExpression(),
true, authCheck));
// Save an object allocation where we can
if (newCell.getTagsLength() > 0) {
// Carry forward all other tags
Iterator<Tag> tagsItr = CellUtil.tagsIterator(newCell.getTagsArray(),
newCell.getTagsOffset(), newCell.getTagsLength());
while (tagsItr.hasNext()) {
Tag tag = tagsItr.next();
if (tag.getType() != TagType.VISIBILITY_TAG_TYPE
&& tag.getType() != TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) {
tags.add(tag);
}
// Carry forward all other tags
Iterator<Tag> tagsItr = CellUtil.tagsIterator(newCell);
while (tagsItr.hasNext()) {
Tag tag = tagsItr.next();
if (tag.getType() != TagType.VISIBILITY_TAG_TYPE
&& tag.getType() != TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) {
tags.add(tag);
}
}
Cell rewriteCell = new TagRewriteCell(newCell, Tag.fromList(tags));
Cell rewriteCell = new TagRewriteCell(newCell, TagUtil.fromList(tags));
return rewriteCell;
}

View File

@ -24,10 +24,12 @@ import java.util.UUID;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagRewriteCell;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
@ -79,7 +81,8 @@ public class VisibilityReplicationEndpoint implements ReplicationEndpoint {
byte[] modifiedVisExpression = visibilityLabelsService
.encodeVisibilityForReplication(visTags, serializationFormat);
if (modifiedVisExpression != null) {
nonVisTags.add(new Tag(TagType.STRING_VIS_TAG_TYPE, modifiedVisExpression));
nonVisTags
.add(new ArrayBackedTag(TagType.STRING_VIS_TAG_TYPE, modifiedVisExpression));
}
} catch (Exception ioe) {
LOG.error(
@ -92,7 +95,7 @@ public class VisibilityReplicationEndpoint implements ReplicationEndpoint {
continue;
}
// Recreate the cell with the new tags and the existing tags
Cell newCell = new TagRewriteCell(cell, Tag.fromList(nonVisTags));
Cell newCell = new TagRewriteCell(cell, TagUtil.fromList(nonVisTags));
newEdit.add(newCell);
} else {
newEdit.add(cell);

View File

@ -35,11 +35,13 @@ import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.Filter;
@ -74,7 +76,7 @@ public class VisibilityUtils {
public static final String VISIBILITY_LABEL_GENERATOR_CLASS =
"hbase.regionserver.scan.visibility.label.generator.class";
public static final String SYSTEM_LABEL = "system";
public static final Tag SORTED_ORDINAL_SERIALIZATION_FORMAT_TAG = new Tag(
public static final Tag SORTED_ORDINAL_SERIALIZATION_FORMAT_TAG = new ArrayBackedTag(
TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE,
VisibilityConstants.SORTED_ORDINAL_SERIALIZATION_FORMAT_TAG_VAL);
private static final String COMMA = ",";
@ -209,16 +211,13 @@ public class VisibilityUtils {
*/
public static Byte extractVisibilityTags(Cell cell, List<Tag> tags) {
Byte serializationFormat = null;
if (cell.getTagsLength() > 0) {
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
while (tagsIterator.hasNext()) {
Tag tag = tagsIterator.next();
if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) {
serializationFormat = tag.getBuffer()[tag.getTagOffset()];
} else if (tag.getType() == VISIBILITY_TAG_TYPE) {
tags.add(tag);
}
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell);
while (tagsIterator.hasNext()) {
Tag tag = tagsIterator.next();
if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) {
serializationFormat = TagUtil.getValueAsByte(tag);
} else if (tag.getType() == VISIBILITY_TAG_TYPE) {
tags.add(tag);
}
}
return serializationFormat;
@ -239,30 +238,23 @@ public class VisibilityUtils {
public static Byte extractAndPartitionTags(Cell cell, List<Tag> visTags,
List<Tag> nonVisTags) {
Byte serializationFormat = null;
if (cell.getTagsLength() > 0) {
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
while (tagsIterator.hasNext()) {
Tag tag = tagsIterator.next();
if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) {
serializationFormat = tag.getBuffer()[tag.getTagOffset()];
} else if (tag.getType() == VISIBILITY_TAG_TYPE) {
visTags.add(tag);
} else {
// ignore string encoded visibility expressions, will be added in replication handling
nonVisTags.add(tag);
}
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell);
while (tagsIterator.hasNext()) {
Tag tag = tagsIterator.next();
if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) {
serializationFormat = TagUtil.getValueAsByte(tag);
} else if (tag.getType() == VISIBILITY_TAG_TYPE) {
visTags.add(tag);
} else {
// ignore string encoded visibility expressions, will be added in replication handling
nonVisTags.add(tag);
}
}
return serializationFormat;
}
public static boolean isVisibilityTagsPresent(Cell cell) {
if (cell.getTagsLength() == 0) {
return false;
}
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell);
while (tagsIterator.hasNext()) {
Tag tag = tagsIterator.next();
if (tag.getType() == VISIBILITY_TAG_TYPE) {
@ -322,7 +314,7 @@ public class VisibilityUtils {
if (node.isSingleNode()) {
getLabelOrdinals(node, labelOrdinals, auths, checkAuths, ordinalProvider);
writeLabelOrdinalsToStream(labelOrdinals, dos);
tags.add(new Tag(VISIBILITY_TAG_TYPE, baos.toByteArray()));
tags.add(new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray()));
baos.reset();
} else {
NonLeafExpressionNode nlNode = (NonLeafExpressionNode) node;
@ -330,14 +322,14 @@ public class VisibilityUtils {
for (ExpressionNode child : nlNode.getChildExps()) {
getLabelOrdinals(child, labelOrdinals, auths, checkAuths, ordinalProvider);
writeLabelOrdinalsToStream(labelOrdinals, dos);
tags.add(new Tag(VISIBILITY_TAG_TYPE, baos.toByteArray()));
tags.add(new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray()));
baos.reset();
labelOrdinals.clear();
}
} else {
getLabelOrdinals(nlNode, labelOrdinals, auths, checkAuths, ordinalProvider);
writeLabelOrdinalsToStream(labelOrdinals, dos);
tags.add(new Tag(VISIBILITY_TAG_TYPE, baos.toByteArray()));
tags.add(new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray()));
baos.reset();
}
}

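extractAndPartitionTags above is the fullest example of a single-pass split over a cell's tags: one iteration pulls out the serialization-format byte and separates visibility tags from the rest. A condensed sketch (the class name is illustrative):

import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;

public class PartitionTagsSketch {
  static Byte partition(Cell cell, List<Tag> visTags, List<Tag> nonVisTags) {
    Byte serializationFormat = null;
    Iterator<Tag> it = CellUtil.tagsIterator(cell);
    while (it.hasNext()) {
      Tag tag = it.next();
      if (tag.getType() == TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE) {
        // Single-byte value read without cloning.
        serializationFormat = TagUtil.getValueAsByte(tag);
      } else if (tag.getType() == TagType.VISIBILITY_TAG_TYPE) {
        visTags.add(tag);
      } else {
        nonVisTags.add(tag);
      }
    }
    return serializationFormat;
  }
}
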
View File

@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
@ -339,12 +340,10 @@ public class WALPrettyPrinter {
stringMap.put("vlen", cell.getValueLength());
if (cell.getTagsLength() > 0) {
List<String> tagsString = new ArrayList<String>();
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell);
while (tagsIterator.hasNext()) {
Tag tag = tagsIterator.next();
tagsString.add((tag.getType()) + ":"
+ Bytes.toStringBinary(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()));
tagsString.add((tag.getType()) + ":" + Bytes.toStringBinary(TagUtil.cloneValue(tag)));
}
stringMap.put("tag", tagsString);
}

View File

@ -1405,7 +1405,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
byte[] tag = generateData(this.rand, TAG_LENGTH);
Tag[] tags = new Tag[opts.noOfTags];
for (int n = 0; n < opts.noOfTags; n++) {
Tag t = new Tag((byte) n, tag);
Tag t = new ArrayBackedTag((byte) n, tag);
tags[n] = t;
}
KeyValue kv = new KeyValue(row, FAMILY_NAME, qualifier, HConstants.LATEST_TIMESTAMP,
@ -1493,7 +1493,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
byte[] tag = generateData(this.rand, TAG_LENGTH);
Tag[] tags = new Tag[opts.noOfTags];
for (int n = 0; n < opts.noOfTags; n++) {
Tag t = new Tag((byte) n, tag);
Tag t = new ArrayBackedTag((byte) n, tag);
tags[n] = t;
}
KeyValue kv = new KeyValue(row, FAMILY_NAME, qualifier, HConstants.LATEST_TIMESTAMP,

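The test hunks from here on mostly swap the constructor in test-data setup. A sketch of the pattern they share, building a Tag[] for the KeyValue constructor (the factory class and its parameters are illustrative):

import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;

public class TaggedKvFactorySketch {
  static KeyValue taggedKv(byte[] row, byte[] family, byte[] qualifier,
      byte[] value, int noOfTags, byte[] tagValue) {
    Tag[] tags = new Tag[noOfTags];
    for (int n = 0; n < noOfTags; n++) {
      // Tag type bytes here are arbitrary test values, as in the hunk above.
      tags[n] = new ArrayBackedTag((byte) n, tagValue);
    }
    return new KeyValue(row, family, qualifier, HConstants.LATEST_TIMESTAMP, value, tags);
  }
}
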
View File

@ -23,6 +23,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
@ -103,11 +104,11 @@ public class TestResultSizeEstimation {
Table table = TEST_UTIL.createTable(TABLE, FAMILIES);
Put p = new Put(ROW1);
p.add(new KeyValue(ROW1, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE,
new Tag[] { new Tag((byte)1, new byte[TAG_DATA_SIZE]) } ));
new Tag[] { new ArrayBackedTag((byte)1, new byte[TAG_DATA_SIZE]) } ));
table.put(p);
p = new Put(ROW2);
p.add(new KeyValue(ROW2, FAMILY, QUALIFIER, Long.MAX_VALUE, VALUE,
new Tag[] { new Tag((byte)1, new byte[TAG_DATA_SIZE]) } ));
new Tag[] { new ArrayBackedTag((byte)1, new byte[TAG_DATA_SIZE]) } ));
table.put(p);
Scan s = new Scan();

View File

@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeSeeker;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.BufferGrabbingByteArrayOutputStream;
@ -136,10 +137,10 @@ public class TestDataBlockEncoders {
} else {
byte[] metaValue1 = Bytes.toBytes("metaValue1");
byte[] metaValue2 = Bytes.toBytes("metaValue2");
kvList.add(new KeyValue(row, family, qualifier, 0l, value, new Tag[] { new Tag((byte) 1,
metaValue1) }));
kvList.add(new KeyValue(row, family, qualifier, 0l, value, new Tag[] { new Tag((byte) 1,
metaValue2) }));
kvList.add(new KeyValue(row, family, qualifier, 0l, value,
new Tag[] { new ArrayBackedTag((byte) 1, metaValue1) }));
kvList.add(new KeyValue(row, family, qualifier, 0l, value,
new Tag[] { new ArrayBackedTag((byte) 1, metaValue2) }));
}
testEncodersOnDataset(kvList, includesMemstoreTS, includesTags);
}
@ -160,10 +161,10 @@ public class TestDataBlockEncoders {
if (includesTags) {
byte[] metaValue1 = Bytes.toBytes("metaValue1");
byte[] metaValue2 = Bytes.toBytes("metaValue2");
kvList.add(new KeyValue(row, family, qualifier, 0l, value, new Tag[] { new Tag((byte) 1,
metaValue1) }));
kvList.add(new KeyValue(row, family, qualifier, 0l, value, new Tag[] { new Tag((byte) 1,
metaValue2) }));
kvList.add(new KeyValue(row, family, qualifier, 0l, value,
new Tag[] { new ArrayBackedTag((byte) 1, metaValue1) }));
kvList.add(new KeyValue(row, family, qualifier, 0l, value,
new Tag[] { new ArrayBackedTag((byte) 1, metaValue2) }));
} else {
kvList.add(new KeyValue(row, family, qualifier, -1l, Type.Put, value));
kvList.add(new KeyValue(row, family, qualifier, -2l, Type.Put, value));
@ -416,10 +417,10 @@ public class TestDataBlockEncoders {
byte[] value0 = new byte[] { 'd' };
byte[] value1 = new byte[] { 0x00 };
if (includesTags) {
kvList.add(new KeyValue(row, family, qualifier0, 0, value0, new Tag[] { new Tag((byte) 1,
"value1") }));
kvList.add(new KeyValue(row, family, qualifier1, 0, value1, new Tag[] { new Tag((byte) 1,
"value1") }));
kvList.add(new KeyValue(row, family, qualifier0, 0, value0,
new Tag[] { new ArrayBackedTag((byte) 1, "value1") }));
kvList.add(new KeyValue(row, family, qualifier1, 0, value1,
new Tag[] { new ArrayBackedTag((byte) 1, "value1") }));
} else {
kvList.add(new KeyValue(row, family, qualifier0, 0, Type.Put, value0));
kvList.add(new KeyValue(row, family, qualifier1, 0, Type.Put, value1));

View File

@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
@ -148,7 +149,7 @@ public class TestEncodedSeekers {
byte[] value = dataGenerator.generateRandomSizeValue(key, col);
if (includeTags) {
Tag[] tag = new Tag[1];
tag[0] = new Tag((byte) 1, "Visibility");
tag[0] = new ArrayBackedTag((byte) 1, "Visibility");
KeyValue kv = new KeyValue(key, CF_BYTES, col, HConstants.LATEST_TIMESTAMP, value, tag);
put.add(kv);
} else {

View File

@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker;
@ -280,7 +281,7 @@ public class TestPrefixTreeEncoding {
kvset.add(kv);
} else {
KeyValue kv = new KeyValue(getRowKey(batchId, i), CF_BYTES, getQualifier(j), 0l,
getValue(batchId, i, j), new Tag[] { new Tag((byte) 1, "metaValue1") });
getValue(batchId, i, j), new Tag[] { new ArrayBackedTag((byte) 1, "metaValue1") });
kvset.add(kv);
}
}
@ -308,7 +309,7 @@ public class TestPrefixTreeEncoding {
kvset.add(kv);
} else {
KeyValue kv = new KeyValue(getRowKey(batchId, i), CF_BYTES, getQualifier(j), 0l,
getValue(batchId, i, j), new Tag[] { new Tag((byte) 1, "metaValue1") });
getValue(batchId, i, j), new Tag[] { new ArrayBackedTag((byte) 1, "metaValue1") });
kvset.add(kv);
}
}

View File

@ -39,6 +39,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@ -385,7 +386,7 @@ public class TestCacheOnWrite {
byte[] value = RandomKeyValueUtil.randomValue(rand);
KeyValue kv;
if(useTags) {
Tag t = new Tag((byte) 1, "visibility");
Tag t = new ArrayBackedTag((byte) 1, "visibility");
List<Tag> tagList = new ArrayList<Tag>();
tagList.add(t);
Tag[] tags = new Tag[1];
@ -434,7 +435,7 @@ public class TestCacheOnWrite {
String valueStr = "value_" + rowStr + "_" + qualStr;
for (int iTS = 0; iTS < 5; ++iTS) {
if (useTags) {
Tag t = new Tag((byte) 1, "visibility");
Tag t = new ArrayBackedTag((byte) 1, "visibility");
Tag[] tags = new Tag[1];
tags[0] = t;
KeyValue kv = new KeyValue(Bytes.toBytes(rowStr), cfBytes, Bytes.toBytes(qualStr),

View File

@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
@ -169,7 +170,7 @@ public class TestHFile {
for (int i = start; i < (start + n); i++) {
String key = String.format(localFormatter, Integer.valueOf(i));
if (useTags) {
Tag t = new Tag((byte) 1, "myTag1");
Tag t = new ArrayBackedTag((byte) 1, "myTag1");
Tag[] tags = new Tag[1];
tags[0] = t;
kv = new KeyValue(Bytes.toBytes(key), Bytes.toBytes("family"), Bytes.toBytes("qual"),

View File

@ -52,6 +52,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
@ -167,8 +168,8 @@ public class TestHFileBlock {
if (!useTag) {
keyValues.add(new KeyValue(row, family, qualifier, timestamp, value));
} else {
keyValues.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] { new Tag(
(byte) 1, Bytes.toBytes("myTagVal")) }));
keyValues.add(new KeyValue(row, family, qualifier, timestamp, value,
new Tag[] { new ArrayBackedTag((byte) 1, Bytes.toBytes("myTagVal")) }));
}
}

View File

@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.hfile.HFile.FileInfo;
@ -140,7 +141,7 @@ public class TestHFileWriterV3 {
for (int j = 0; j < 1 + rand.nextInt(4); j++) {
byte[] tagBytes = new byte[16];
rand.nextBytes(tagBytes);
tags.add(new Tag((byte) 1, tagBytes));
tags.add(new ArrayBackedTag((byte) 1, tagBytes));
}
keyValue = new KeyValue(keyBytes, null, null, HConstants.LATEST_TIMESTAMP,
valueBytes, tags);

View File

@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
@ -84,7 +85,7 @@ public class TestReseekTo {
Bytes.toBytes(value));
writer.append(kv);
} else if (tagUsage == TagUsage.ONLY_TAG) {
Tag t = new Tag((byte) 1, "myTag1");
Tag t = new ArrayBackedTag((byte) 1, "myTag1");
Tag[] tags = new Tag[1];
tags[0] = t;
kv = new KeyValue(Bytes.toBytes(key), Bytes.toBytes("family"), Bytes.toBytes("qual"),
@ -92,7 +93,7 @@ public class TestReseekTo {
writer.append(kv);
} else {
if (key % 4 == 0) {
Tag t = new Tag((byte) 1, "myTag1");
Tag t = new ArrayBackedTag((byte) 1, "myTag1");
Tag[] tags = new Tag[1];
tags[0] = t;
kv = new KeyValue(Bytes.toBytes(key), Bytes.toBytes("family"), Bytes.toBytes("qual"),

View File

@ -45,6 +45,8 @@ import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.OffheapKeyValue;
import org.apache.hadoop.hbase.ShareableMemory;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@ -88,7 +90,7 @@ public class TestSeekTo {
return new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), Bytes.toBytes("qualifier"),
Bytes.toBytes("value"));
} else if (tagUsage == TagUsage.ONLY_TAG) {
Tag t = new Tag((byte) 1, "myTag1");
Tag t = new ArrayBackedTag((byte) 1, "myTag1");
Tag[] tags = new Tag[1];
tags[0] = t;
return new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), Bytes.toBytes("qualifier"),
@ -100,7 +102,7 @@ public class TestSeekTo {
Bytes.toBytes("qualifier"), HConstants.LATEST_TIMESTAMP, Bytes.toBytes("value"));
} else {
switchKVs = false;
Tag t = new Tag((byte) 1, "myTag1");
Tag t = new ArrayBackedTag((byte) 1, "myTag1");
Tag[] tags = new Tag[1];
tags[0] = t;
return new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"),
@ -174,11 +176,10 @@ public class TestSeekTo {
assertEquals("i", toRowStr(scanner.getCell()));
Cell cell = scanner.getCell();
if (tagUsage != TagUsage.NO_TAG && cell.getTagsLength() > 0) {
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell);
while (tagsIterator.hasNext()) {
Tag next = tagsIterator.next();
assertEquals("myTag1", Bytes.toString(next.getValue()));
assertEquals("myTag1", Bytes.toString(TagUtil.cloneValue(next)));
}
}
assertTrue(scanner.seekBefore(toKV("k", tagUsage)));

View File

@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Scan;
@ -57,7 +58,6 @@ import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.mob.MobConstants;
import org.apache.hadoop.hbase.mob.MobUtils;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.regionserver.StoreFile.Reader;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.compactions.NoLimitCompactionThroughputController;
import org.apache.hadoop.hbase.security.EncryptionUtil;
@ -190,7 +190,8 @@ public class TestHMobStore {
String targetPathName = MobUtils.formatDate(currentDate);
byte[] referenceValue = Bytes.toBytes(targetPathName + Path.SEPARATOR + mobFilePath.getName());
Tag tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, store.getTableName().getName());
Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE,
store.getTableName().getName());
KeyValue kv1 = new KeyValue(row, family, qf1, Long.MAX_VALUE, referenceValue);
KeyValue kv2 = new KeyValue(row, family, qf2, Long.MAX_VALUE, referenceValue);
KeyValue kv3 = new KeyValue(row2, family, qf3, Long.MAX_VALUE, referenceValue);

View File

@ -96,7 +96,7 @@ import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.RegionTooBusyException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.Append;
@ -6335,16 +6335,16 @@ public class TestHRegion {
long now = EnvironmentEdgeManager.currentTime();
// Add a cell that will expire in 5 seconds via cell TTL
region.put(new Put(row).add(new KeyValue(row, fam1, q1, now,
HConstants.EMPTY_BYTE_ARRAY, new Tag[] {
HConstants.EMPTY_BYTE_ARRAY, new ArrayBackedTag[] {
// TTL tags specify ts in milliseconds
new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L)) } )));
new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L)) } )));
// Add a cell that will expire after 10 seconds via family setting
region.put(new Put(row).addColumn(fam1, q2, now, HConstants.EMPTY_BYTE_ARRAY));
// Add a cell that will expire in 15 seconds via cell TTL
region.put(new Put(row).add(new KeyValue(row, fam1, q3, now + 10000 - 1,
HConstants.EMPTY_BYTE_ARRAY, new Tag[] {
HConstants.EMPTY_BYTE_ARRAY, new ArrayBackedTag[] {
// TTL tags specify ts in milliseconds
new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L)) } )));
new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L)) } )));
// Add a cell that will expire in 20 seconds via family setting
region.put(new Put(row).addColumn(fam1, q4, now + 10000 - 1, HConstants.EMPTY_BYTE_ARRAY));

View File

@ -31,9 +31,11 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
@ -86,7 +88,7 @@ public class TestStoreFileScannerWithTagCompression {
kv.getRowLength()));
List<Tag> tags = KeyValueUtil.ensureKeyValue(kv).getTags();
assertEquals(1, tags.size());
assertEquals("tag3", Bytes.toString(tags.get(0).getValue()));
assertEquals("tag3", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
} finally {
s.close();
}
@ -97,9 +99,9 @@ public class TestStoreFileScannerWithTagCompression {
byte[] qualifier = Bytes.toBytes("q");
long now = System.currentTimeMillis();
byte[] b = Bytes.toBytes("k1");
Tag t1 = new Tag((byte) 1, "tag1");
Tag t2 = new Tag((byte) 2, "tag2");
Tag t3 = new Tag((byte) 3, "tag3");
Tag t1 = new ArrayBackedTag((byte) 1, "tag1");
Tag t2 = new ArrayBackedTag((byte) 2, "tag2");
Tag t3 = new ArrayBackedTag((byte) 3, "tag3");
try {
writer.append(new KeyValue(b, fam, qualifier, now, b, new Tag[] { t1 }));
b = Bytes.toBytes("k3");

View File

@ -36,6 +36,8 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Durability;
@ -325,7 +327,7 @@ public class TestTags {
if (CellUtil.matchingRow(current, row)) {
assertEquals(1, TestCoprocessorForTags.tags.size());
Tag tag = TestCoprocessorForTags.tags.get(0);
assertEquals(bigTagLen, tag.getTagLength());
assertEquals(bigTagLen, tag.getValueLength());
} else {
assertEquals(0, TestCoprocessorForTags.tags.size());
}
@ -350,7 +352,7 @@ public class TestTags {
if (CellUtil.matchingRow(current, row)) {
assertEquals(1, TestCoprocessorForTags.tags.size());
Tag tag = TestCoprocessorForTags.tags.get(0);
assertEquals(bigTagLen, tag.getTagLength());
assertEquals(bigTagLen, tag.getValueLength());
} else {
assertEquals(0, TestCoprocessorForTags.tags.size());
}
@ -403,7 +405,7 @@ public class TestTags {
List<Tag> tags = TestCoprocessorForTags.tags;
assertEquals(3L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
assertEquals(1, tags.size());
assertEquals("tag1", Bytes.toString(tags.get(0).getValue()));
assertEquals("tag1", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
TestCoprocessorForTags.checkTagPresence = false;
TestCoprocessorForTags.tags = null;
@ -421,7 +423,7 @@ public class TestTags {
// We cannot assume the ordering of tags
List<String> tagValues = new ArrayList<String>();
for (Tag tag: tags) {
tagValues.add(Bytes.toString(tag.getValue()));
tagValues.add(Bytes.toString(TagUtil.cloneValue(tag)));
}
assertTrue(tagValues.contains("tag1"));
assertTrue(tagValues.contains("tag2"));
@ -445,7 +447,7 @@ public class TestTags {
tags = TestCoprocessorForTags.tags;
assertEquals(4L, Bytes.toLong(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
assertEquals(1, tags.size());
assertEquals("tag2", Bytes.toString(tags.get(0).getValue()));
assertEquals("tag2", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
TestCoprocessorForTags.checkTagPresence = false;
TestCoprocessorForTags.tags = null;
@ -466,7 +468,7 @@ public class TestTags {
kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
tags = TestCoprocessorForTags.tags;
assertEquals(1, tags.size());
assertEquals("tag1", Bytes.toString(tags.get(0).getValue()));
assertEquals("tag1", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
TestCoprocessorForTags.checkTagPresence = false;
TestCoprocessorForTags.tags = null;
@ -483,7 +485,7 @@ public class TestTags {
// We cannot assume the ordering of tags
tagValues.clear();
for (Tag tag: tags) {
tagValues.add(Bytes.toString(tag.getValue()));
tagValues.add(Bytes.toString(TagUtil.cloneValue(tag)));
}
assertTrue(tagValues.contains("tag1"));
assertTrue(tagValues.contains("tag2"));
@ -506,7 +508,7 @@ public class TestTags {
kv = KeyValueUtil.ensureKeyValue(result.getColumnLatestCell(f, q));
tags = TestCoprocessorForTags.tags;
assertEquals(1, tags.size());
assertEquals("tag2", Bytes.toString(tags.get(0).getValue()));
assertEquals("tag2", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
} finally {
TestCoprocessorForTags.checkTagPresence = false;
TestCoprocessorForTags.tags = null;
@ -569,7 +571,7 @@ public class TestTags {
if (cf == null) {
cf = CellUtil.cloneFamily(kv);
}
Tag tag = new Tag((byte) 1, attribute);
Tag tag = new ArrayBackedTag((byte) 1, attribute);
List<Tag> tagList = new ArrayList<Tag>();
tagList.add(tag);
@ -611,7 +613,7 @@ public class TestTags {
CellScanner cellScanner = result.cellScanner();
if (cellScanner.advance()) {
Cell cell = cellScanner.current();
tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(),
tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
}
}

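TestTags above reads tags back with TagUtil.asList over the cell's flattened tag block and compares values via TagUtil.cloneValue. A sketch of that read-back path (the helper class is illustrative):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class ReadBackTagsSketch {
  static List<String> tagValueStrings(Cell cell) {
    List<Tag> tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(),
        cell.getTagsLength());
    List<String> values = new ArrayList<String>(tags.size());
    for (Tag tag : tags) {
      // cloneValue copies the value out of the backing storage.
      values.add(Bytes.toString(TagUtil.cloneValue(tag)));
    }
    return values;
  }
}
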
View File

@ -24,9 +24,10 @@ import java.util.List;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.io.util.LRUDictionary;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.DataOutputBuffer;
@ -108,7 +109,7 @@ public class TestKeyValueCompression {
byte[] value = Bytes.toBytes("myValue");
List<Tag> tags = new ArrayList<Tag>(noOfTags);
for (int i = 1; i <= noOfTags; i++) {
tags.add(new Tag((byte) i, Bytes.toBytes("tagValue" + i)));
tags.add(new ArrayBackedTag((byte) i, Bytes.toBytes("tagValue" + i)));
}
return new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, tags);
}

View File

@ -30,6 +30,8 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.codec.Codec.Decoder;
import org.apache.hadoop.hbase.codec.Codec.Encoder;
import org.apache.hadoop.hbase.io.util.LRUDictionary;
@ -69,7 +71,7 @@ public class TestWALCellCodecWithCompression {
KeyValue kv = (KeyValue) decoder.current();
List<Tag> tags = kv.getTags();
assertEquals(1, tags.size());
assertEquals("tagValue1", Bytes.toString(tags.get(0).getValue()));
assertEquals("tagValue1", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
decoder.advance();
kv = (KeyValue) decoder.current();
tags = kv.getTags();
@ -78,8 +80,8 @@ public class TestWALCellCodecWithCompression {
kv = (KeyValue) decoder.current();
tags = kv.getTags();
assertEquals(2, tags.size());
assertEquals("tagValue1", Bytes.toString(tags.get(0).getValue()));
assertEquals("tagValue2", Bytes.toString(tags.get(1).getValue()));
assertEquals("tagValue1", Bytes.toString(TagUtil.cloneValue(tags.get(0))));
assertEquals("tagValue2", Bytes.toString(TagUtil.cloneValue(tags.get(1))));
}
private KeyValue createKV(int noOfTags) {
@ -89,7 +91,7 @@ public class TestWALCellCodecWithCompression {
byte[] value = Bytes.toBytes("myValue");
List<Tag> tags = new ArrayList<Tag>(noOfTags);
for (int i = 1; i <= noOfTags; i++) {
tags.add(new Tag((byte) i, Bytes.toBytes("tagValue" + i)));
tags.add(new ArrayBackedTag((byte) i, Bytes.toBytes("tagValue" + i)));
}
return new KeyValue(row, cf, q, HConstants.LATEST_TIMESTAMP, value, tags);
}

View File

@ -39,6 +39,8 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@ -209,7 +211,7 @@ public class TestReplicationWithTags {
if (cf == null) {
cf = CellUtil.cloneFamily(kv);
}
Tag tag = new Tag(TAG_TYPE, attribute);
Tag tag = new ArrayBackedTag(TAG_TYPE, attribute);
List<Tag> tagList = new ArrayList<Tag>();
tagList.add(tag);
@ -238,7 +240,7 @@ public class TestReplicationWithTags {
// Check tag presence in the 1st cell of the 1st Result
if (!results.isEmpty()) {
Cell cell = results.get(0);
tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
}
}
}

View File

@ -56,6 +56,7 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Connection;
@ -2516,7 +2517,7 @@ public class TestAccessController extends SecureTestUtil {
Table t = conn.getTable(TEST_TABLE);) {
KeyValue kv = new KeyValue(TEST_ROW, TEST_FAMILY, TEST_QUALIFIER,
HConstants.LATEST_TIMESTAMP, HConstants.EMPTY_BYTE_ARRAY,
new Tag[] { new Tag(AccessControlLists.ACL_TAG_TYPE,
new Tag[] { new ArrayBackedTag(AccessControlLists.ACL_TAG_TYPE,
ProtobufUtil.toUsersAndPermissions(USER_OWNER.getShortName(),
new Permission(Permission.Action.READ)).toByteArray()) });
t.put(new Put(TEST_ROW).add(kv));

View File

@ -40,7 +40,9 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@ -58,6 +60,7 @@ import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode;
import org.apache.hadoop.hbase.security.visibility.expression.Operator;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
/**
@ -73,7 +76,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
private static final byte[] DUMMY_VALUE = new byte[0];
private static final byte STRING_SERIALIZATION_FORMAT = 2;
private static final Tag STRING_SERIALIZATION_FORMAT_TAG = new Tag(
private static final Tag STRING_SERIALIZATION_FORMAT_TAG = new ArrayBackedTag(
TagType.VISIBILITY_EXP_SERIALIZATION_FORMAT_TAG_TYPE,
new byte[] { STRING_SERIALIZATION_FORMAT });
private final ExpressionParser expressionParser = new ExpressionParser();
@ -281,28 +284,27 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
boolean visibilityTagPresent = false;
// Save an object allocation where we can
if (cell.getTagsLength() > 0) {
Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
Iterator<Tag> tagsItr = CellUtil.tagsIterator(cell);
while (tagsItr.hasNext()) {
boolean includeKV = true;
Tag tag = tagsItr.next();
if (tag.getType() == VISIBILITY_TAG_TYPE) {
visibilityTagPresent = true;
int offset = tag.getTagOffset();
int endOffset = offset + tag.getTagLength();
int offset = tag.getValueOffset();
int endOffset = offset + tag.getValueLength();
while (offset < endOffset) {
short len = Bytes.toShort(tag.getBuffer(), offset);
short len = getTagValuePartAsShort(tag, offset);
offset += 2;
if (len < 0) {
// This is a NOT label.
len = (short) (-1 * len);
String label = Bytes.toString(tag.getBuffer(), offset, len);
String label = Bytes.toString(tag.getValueArray(), offset, len);
if (authLabelsFinal.contains(label)) {
includeKV = false;
break;
}
} else {
String label = Bytes.toString(tag.getBuffer(), offset, len);
String label = Bytes.toString(tag.getValueArray(), offset, len);
if (!authLabelsFinal.contains(label)) {
includeKV = false;
break;
@ -353,7 +355,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
dos.writeShort(bLabel.length);
dos.write(bLabel);
}
return new Tag(VISIBILITY_TAG_TYPE, baos.toByteArray());
return new ArrayBackedTag(VISIBILITY_TAG_TYPE, baos.toByteArray());
}
private void extractLabels(ExpressionNode node, List<String> labels, List<String> notLabels) {
@ -423,8 +425,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
for (Tag tag : deleteVisTags) {
matchFound = false;
for (Tag givenTag : putVisTags) {
if (Bytes.equals(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength(),
givenTag.getBuffer(), givenTag.getTagOffset(), givenTag.getTagLength())) {
if (TagUtil.matchingValue(tag, givenTag)) {
matchFound = true;
break;
}
@ -459,15 +460,15 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
visibilityString.append(VisibilityConstants.CLOSED_PARAN
+ VisibilityConstants.OR_OPERATOR);
}
int offset = tag.getTagOffset();
int endOffset = offset + tag.getTagLength();
int offset = tag.getValueOffset();
int endOffset = offset + tag.getValueLength();
boolean expressionStart = true;
while (offset < endOffset) {
short len = Bytes.toShort(tag.getBuffer(), offset);
short len = getTagValuePartAsShort(tag, offset);
offset += 2;
if (len < 0) {
len = (short) (-1 * len);
String label = Bytes.toString(tag.getBuffer(), offset, len);
String label = getTagValuePartAsString(tag, offset, len);
if (expressionStart) {
visibilityString.append(VisibilityConstants.OPEN_PARAN
+ VisibilityConstants.NOT_OPERATOR + CellVisibility.quote(label));
@ -476,7 +477,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
+ VisibilityConstants.NOT_OPERATOR + CellVisibility.quote(label));
}
} else {
String label = Bytes.toString(tag.getBuffer(), offset, len);
String label = getTagValuePartAsString(tag, offset, len);
if (expressionStart) {
visibilityString.append(VisibilityConstants.OPEN_PARAN + CellVisibility.quote(label));
} else {
@ -496,4 +497,20 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
}
return null;
}
private static short getTagValuePartAsShort(Tag t, int offset) {
if (t.hasArray()) {
return Bytes.toShort(t.getValueArray(), offset);
}
return ByteBufferUtils.toShort(t.getValueByteBuffer(), offset);
}
private static String getTagValuePartAsString(Tag t, int offset, int length) {
if (t.hasArray()) {
return Bytes.toString(t.getValueArray(), offset, length);
}
byte[] b = new byte[length];
ByteBufferUtils.copyFromBufferToArray(b, t.getValueByteBuffer(), offset, 0, length);
return Bytes.toString(b);
}
}
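For context on the loops above: this service serializes a visibility expression into a single tag value as a flat run of 2-byte length-prefixed labels, where a negative length marks a negated (NOT) label. The two private helpers at the end of the file hide whether that value sits in a byte[] or a ByteBuffer. A standalone decoder for the same layout might look roughly like this (a sketch; the class name and the "!" prefix for NOT labels are illustrative, not part of the commit):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;

public class VisibilityTagDecodeExample {
  // Decodes one string-format visibility tag value: a sequence of
  // [short length][label bytes], where a negative length means a NOT label.
  static List<String> decode(Tag tag) {
    List<String> labels = new ArrayList<String>();
    int offset = tag.getValueOffset();
    int endOffset = offset + tag.getValueLength();
    while (offset < endOffset) {
      short len = tag.hasArray() ? Bytes.toShort(tag.getValueArray(), offset)
          : ByteBufferUtils.toShort(tag.getValueByteBuffer(), offset);
      offset += Bytes.SIZEOF_SHORT;
      boolean negated = len < 0;
      if (negated) {
        len = (short) (-1 * len);
      }
      String label;
      if (tag.hasArray()) {
        label = Bytes.toString(tag.getValueArray(), offset, len);
      } else {
        byte[] b = new byte[len];
        ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), offset, 0, len);
        label = Bytes.toString(b);
      }
      labels.add(negated ? "!" + label : label); // "!" marks a NOT label, display only
      offset += len;
    }
    return labels;
  }
}

Branching on hasArray() keeps the common array-backed path copy-free; only the ByteBuffer-backed path pays for a copy into a temporary array.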

View File

@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@ -183,7 +184,7 @@ public class TestVisibilityLabelReplicationWithExpAsString extends TestVisibilit
boolean foundNonVisTag = false;
for(Tag t : TestCoprocessorForTagsAtSink.tags) {
if(t.getType() == NON_VIS_TAG_TYPE) {
assertEquals(TEMP, Bytes.toString(t.getValue()));
assertEquals(TEMP, Bytes.toString(TagUtil.cloneValue(t)));
foundNonVisTag = true;
break;
}

View File

@ -43,8 +43,10 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.TagRewriteCell;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@ -284,11 +286,11 @@ public class TestVisibilityLabelsReplication {
for (Cell cell : cells) {
if ((Bytes.equals(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(), row, 0,
row.length))) {
List<Tag> tags = Tag
.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
List<Tag> tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(),
cell.getTagsLength());
for (Tag tag : tags) {
if (tag.getType() == TagType.STRING_VIS_TAG_TYPE) {
assertEquals(visTag, Bytes.toString(tag.getValue()));
assertEquals(visTag, TagUtil.getValueAsString(tag));
tagFound = true;
break;
}
@ -330,7 +332,7 @@ public class TestVisibilityLabelsReplication {
boolean foundNonVisTag = false;
for (Tag t : TestCoprocessorForTagsAtSink.tags) {
if (t.getType() == NON_VIS_TAG_TYPE) {
assertEquals(TEMP, Bytes.toString(t.getValue()));
assertEquals(TEMP, TagUtil.getValueAsString(t));
foundNonVisTag = true;
break;
}
@ -407,11 +409,11 @@ public class TestVisibilityLabelsReplication {
if (cf == null) {
cf = CellUtil.cloneFamily(kv);
}
Tag tag = new Tag((byte) NON_VIS_TAG_TYPE, attribute);
Tag tag = new ArrayBackedTag((byte) NON_VIS_TAG_TYPE, attribute);
List<Tag> tagList = new ArrayList<Tag>();
tagList.add(tag);
tagList.addAll(kv.getTags());
byte[] fromList = Tag.fromList(tagList);
byte[] fromList = TagUtil.fromList(tagList);
TagRewriteCell newcell = new TagRewriteCell(kv, fromList);
((List<Cell>) updatedCells).add(newcell);
}
@ -433,7 +435,7 @@ public class TestVisibilityLabelsReplication {
// Check tag presence in the 1st cell of the 1st Result
if (!results.isEmpty()) {
Cell cell = results.get(0);
tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
tags = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
}
}
}
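The coprocessor hunk above shows the intended pattern for adding a tag to an incoming cell: rebuild the serialized tag block with TagUtil.fromList and wrap the original cell in a TagRewriteCell rather than rebuilding the whole KeyValue. Roughly (a sketch; the class and method names are illustrative):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagRewriteCell;
import org.apache.hadoop.hbase.TagUtil;

public class AppendTagExample {
  // Returns a cell equal to 'cell' but carrying one extra tag; tagType and
  // tagValue are caller supplied.
  static Cell withExtraTag(Cell cell, byte tagType, byte[] tagValue) {
    List<Tag> tagList = new ArrayList<Tag>();
    tagList.add(new ArrayBackedTag(tagType, tagValue));
    tagList.addAll(CellUtil.getTags(cell)); // keep the tags the cell already has
    return new TagRewriteCell(cell, TagUtil.fromList(tagList));
  }
}

TagRewriteCell delegates to the wrapped cell for everything but the tags, which appears to be why the test coprocessor prefers it over copying the row, family, qualifier and value bytes.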

View File

@ -21,10 +21,13 @@ package org.apache.hadoop.hbase.util;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
@ -98,14 +101,11 @@ public class HFileTestUtil {
KeyValue kv = new KeyValue(key, family, qualifier, now, key);
if (withTag) {
// Add a tag. The mob tag is chosen arbitrarily since we already have a helper for it.
Tag tableNameTag = new Tag(TagType.MOB_TABLE_NAME_TAG_TYPE, key);
Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, key);
kv = MobUtils.createMobRefKeyValue(kv, key, tableNameTag);
// verify that the kv has the tag.
byte[] ta = kv.getTagsArray();
int toff = kv.getTagsOffset();
int tlen = kv.getTagsLength();
Tag t = Tag.getTag(ta, toff, tlen, TagType.MOB_TABLE_NAME_TAG_TYPE);
Tag t = CellUtil.getTag(kv, TagType.MOB_TABLE_NAME_TAG_TYPE);
if (t == null) {
throw new IllegalStateException("Tag didn't stick to KV " + kv.toString());
}
@ -130,15 +130,12 @@ public class HFileTestUtil {
ResultScanner s = table.getScanner(new Scan());
for (Result r : s) {
for (Cell c : r.listCells()) {
byte[] ta = c.getTagsArray();
int toff = c.getTagsOffset();
int tlen = c.getTagsLength();
Tag t = Tag.getTag(ta, toff, tlen, TagType.MOB_TABLE_NAME_TAG_TYPE);
Tag t = CellUtil.getTag(c, TagType.MOB_TABLE_NAME_TAG_TYPE);
if (t == null) {
fail(c.toString() + " has null tag");
continue;
}
byte[] tval = t.getValue();
byte[] tval = TagUtil.cloneValue(t);
assertArrayEquals(c.toString() + " has tag " + Bytes.toString(tval),
r.getRow(), tval);
}
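Lookup of a single tag by type likewise moves from the static Tag.getTag(array, offset, length, type) to the Cell-based CellUtil.getTag(cell, type). A compact sketch of the lookup-and-copy idiom used twice above (class and method names are illustrative):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;

public class TagLookupExample {
  // Fetches the MOB table-name tag value from a cell, or null when absent.
  static byte[] mobTableName(Cell cell) {
    Tag t = CellUtil.getTag(cell, TagType.MOB_TABLE_NAME_TAG_TYPE);
    return t == null ? null : TagUtil.cloneValue(t);
  }
}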

View File

@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.MultiThreadedAction.DefaultDataGenerator;
@ -77,7 +78,7 @@ public class LoadTestDataGeneratorWithTags extends DefaultDataGenerator {
minTagLength + random.nextInt(maxTagLength - minTagLength));
tags = new ArrayList<Tag>();
for (int n = 0; n < numTags; n++) {
tags.add(new Tag((byte) 127, tag));
tags.add(new ArrayBackedTag((byte) 127, tag));
}
Cell updatedCell = new KeyValue(cell.getRowArray(), cell.getRowOffset(),
cell.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(),