HBASE-19122 Suspect methods on Cell to be deprecated

Michael Stack 2017-12-18 15:20:15 -08:00
parent 9d0c7c6dfb
commit b4056d267a
No known key found for this signature in database
GPG Key ID: 9816C7FC8ACC93D2
55 changed files with 683 additions and 284 deletions
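Most of this diff is a mechanical migration: the DataType enum moves from CellBuilder into the Cell interface, so call sites change setType(CellBuilder.DataType.Put) to setType(DataType.Put) with a matching import change. A minimal sketch of the resulting call shape, assuming only the public builder API visible in this diff:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.util.Bytes;

public class DataTypeMigrationSketch {
  public static Cell buildPut() {
    // Before this commit: .setType(CellBuilder.DataType.Put)
    // After: the enum lives on Cell, so callers import Cell.DataType instead.
    return CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
        .setRow(Bytes.toBytes("row"))
        .setFamily(Bytes.toBytes("f"))
        .setQualifier(Bytes.toBytes("q"))
        .setTimestamp(System.currentTimeMillis())
        .setType(DataType.Put)
        .setValue(Bytes.toBytes("value"))
        .build();
  }
}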

View File

@ -37,6 +37,7 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Consistency;
@ -72,7 +73,6 @@ import org.apache.hadoop.hbase.util.PairOfSameType;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
@ -1361,7 +1361,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.REPLICATION_BARRIER_FAMILY)
.setQualifier(seqBytes)
.setTimestamp(put.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(seqBytes)
.build())
.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
@ -1369,7 +1369,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.REPLICATION_META_FAMILY)
.setQualifier(tableNameCq)
.setTimestamp(put.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(tableName)
.build());
return put;
@ -1383,7 +1383,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.REPLICATION_META_FAMILY)
.setQualifier(daughterNameCq)
.setTimestamp(put.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(value)
.build());
return put;
@ -1396,7 +1396,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.REPLICATION_META_FAMILY)
.setQualifier(parentNameCq)
.setTimestamp(put.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(value)
.build());
return put;
@ -1413,7 +1413,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(HConstants.SPLITA_QUALIFIER)
.setTimestamp(put.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(RegionInfo.toByteArray(splitA))
.build());
}
@ -1423,7 +1423,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(HConstants.SPLITB_QUALIFIER)
.setTimestamp(put.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(RegionInfo.toByteArray(splitB))
.build());
}
@ -1732,7 +1732,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(HConstants.MERGEA_QUALIFIER)
.setTimestamp(putOfMerged.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(RegionInfo.toByteArray(regionA))
.build())
.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
@ -1740,7 +1740,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(HConstants.MERGEB_QUALIFIER)
.setTimestamp(putOfMerged.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(RegionInfo.toByteArray(regionB))
.build());
@ -1985,7 +1985,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.REPLICATION_POSITION_FAMILY)
.setQualifier(Bytes.toBytes(peerId))
.setTimestamp(put.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(Bytes.toBytes(Math.abs(entry.getValue())))
.build());
puts.add(put);
@ -2153,7 +2153,7 @@ public class MetaTableAccessor {
.setFamily(getCatalogFamily())
.setQualifier(HConstants.REGIONINFO_QUALIFIER)
.setTimestamp(p.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(RegionInfo.toByteArray(hri))
.build());
return p;
@ -2170,7 +2170,7 @@ public class MetaTableAccessor {
.setFamily(getCatalogFamily())
.setQualifier(getServerColumn(replicaId))
.setTimestamp(time)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(Bytes.toBytes(sn.getAddress().toString()))
.build())
.add(builder.clear()
@ -2178,7 +2178,7 @@ public class MetaTableAccessor {
.setFamily(getCatalogFamily())
.setQualifier(getStartCodeColumn(replicaId))
.setTimestamp(time)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(Bytes.toBytes(sn.getStartcode()))
.build())
.add(builder.clear()
@ -2186,7 +2186,7 @@ public class MetaTableAccessor {
.setFamily(getCatalogFamily())
.setQualifier(getSeqNumColumn(replicaId))
.setTimestamp(time)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(Bytes.toBytes(openSeqNum))
.build());
}
@ -2199,21 +2199,21 @@ public class MetaTableAccessor {
.setFamily(getCatalogFamily())
.setQualifier(getServerColumn(replicaId))
.setTimestamp(now)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.build())
.add(builder.clear()
.setRow(p.getRow())
.setFamily(getCatalogFamily())
.setQualifier(getStartCodeColumn(replicaId))
.setTimestamp(now)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.build())
.add(builder.clear()
.setRow(p.getRow())
.setFamily(getCatalogFamily())
.setQualifier(getSeqNumColumn(replicaId))
.setTimestamp(now)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.build());
}
@ -2241,7 +2241,7 @@ public class MetaTableAccessor {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(getSeqNumColumn(replicaId))
.setTimestamp(time)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(Bytes.toBytes(openSeqNum))
.build());
}

View File

@ -194,6 +194,11 @@ public class KeyOnlyFilter extends FilterBase {
return cell.getTypeByte();
}
@Override
public DataType getType() {
return cell.getType();
}
@Override
public long getSequenceId() {
return 0;
@ -307,6 +312,11 @@ public class KeyOnlyFilter extends FilterBase {
return 0;
}
@Override
public DataType getType() {
return cell.getType();
}
@Override
public byte[] getValueArray() {
if (lenAsVal) {

View File

@ -27,6 +27,7 @@ import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
import com.google.protobuf.TextFormat;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
@ -37,9 +38,10 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableSet;
import java.util.function.Function;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
@ -534,7 +536,7 @@ public final class ProtobufUtil {
.setFamily(family)
.setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
.setTimestamp(ts)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
.setTags(allTagsBytes)
.build());
@ -554,7 +556,7 @@ public final class ProtobufUtil {
.setFamily(family)
.setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
.setTimestamp(ts)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
.build());
}

View File

@ -38,13 +38,14 @@ import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ByteBufferCell;
import org.apache.hadoop.hbase.CacheEvictionStats;
import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
@ -676,7 +677,7 @@ public final class ProtobufUtil {
.setFamily(family)
.setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
.setTimestamp(ts)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
.setTags(allTagsBytes)
.build());
@ -696,7 +697,7 @@ public final class ProtobufUtil {
.setFamily(family)
.setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
.setTimestamp(ts)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(qv.hasValue() ? qv.getValue().toByteArray() : null)
.build());
}

View File

@ -24,8 +24,9 @@ import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.testclassification.ClientTests;
@ -87,7 +88,7 @@ public class TestPut {
.setFamily(family)
.setQualifier(qualifier0)
.setTimestamp(put.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(value0)
.build())
.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
@ -95,7 +96,7 @@ public class TestPut {
.setFamily(family)
.setQualifier(qualifier1)
.setTimestamp(ts1)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(value1)
.build());

View File

@ -177,6 +177,12 @@ public class TestHBaseRpcControllerImpl {
// unused
return null;
}
@Override
public DataType getType() {
// unused
return null;
}
};
}

View File

@ -147,6 +147,10 @@ public class ByteBufferKeyOnlyKeyValue extends ByteBufferCell {
return ByteBufferUtils.toByte(this.buf, this.offset + this.length - 1);
}
public DataType getType() {
return PrivateCellUtil.toDataType(getTypeByte());
}
@Override
public long getSequenceId() {
return 0;

View File

@ -17,9 +17,15 @@
*/
package org.apache.hadoop.hbase;
import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
@ -344,4 +350,31 @@ public class ByteBufferKeyValue extends ByteBufferCell implements ExtendedCell {
hash = 31 * hash + cell.getTypeByte();
return hash;
}
@Override
public Optional<Tag> getTag(byte type) {
int length = getTagsLength();
int offset = getTagsPosition();
int pos = offset;
int tagLen;
while (pos < offset + length) {
ByteBuffer tagsBuffer = getTagsByteBuffer();
tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE);
if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) {
return Optional.ofNullable(new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE));
}
pos += TAG_LENGTH_SIZE + tagLen;
}
return Optional.ofNullable(null);
}
@Override
public List<Tag> getTags() {
List<Tag> tags = new ArrayList<>();
Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
while (tagsItr.hasNext()) {
tags.add(tagsItr.next());
}
return tags;
}
}
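Every concrete cell touched by this commit implements getTags() the same way, by draining PrivateCellUtil.tagsIterator(cell). A short usage sketch of that iterator (PrivateCellUtil is internal, so this is illustrative only):

import java.util.Iterator;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag;

public class TagIterationSketch {
  // Count tags without materializing a List, mirroring the getTags()
  // implementations added throughout this commit.
  public static int countTags(Cell cell) {
    int count = 0;
    Iterator<Tag> it = PrivateCellUtil.tagsIterator(cell);
    while (it.hasNext()) {
      it.next();
      count++;
    }
    return count;
  }
}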

View File

@ -133,8 +133,7 @@ public interface Cell {
/**
* @return The byte representation of the KeyValue.TYPE of this cell: one of Put, Delete, etc
* @deprecated since 2.0.0, use appropriate {@link CellUtil#isDelete} or
* {@link CellUtil#isPut(Cell)} methods instead. This will be removed in 3.0.0.
* @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Use {@link #getType()}.
*/
@Deprecated
byte getTypeByte();
@ -148,7 +147,9 @@ public interface Cell {
* {@link HConstants#KEEP_SEQID_PERIOD} days, but generally becomes irrelevant after the cell's
* row is no longer involved in any operations that require strict consistency.
* @return seqId (always &gt; 0 if exists), or 0 if it no longer exists
* @deprecated As of HBase-2.0. Will be removed in HBase-3.0.
*/
@Deprecated
long getSequenceId();
//7) Value
@ -173,12 +174,16 @@ public interface Cell {
/**
* Contiguous raw bytes representing tags that may start at any index in the containing array.
* @return the tags byte array
* @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are now internal.
*/
@Deprecated
byte[] getTagsArray();
/**
* @return the first offset where the tags start in the Cell
* @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are now internal.
*/
@Deprecated
int getTagsOffset();
/**
@ -190,6 +195,39 @@ public interface Cell {
* less than Integer.MAX_VALUE.
*
* @return the total length of the tags in the Cell.
* @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are now internal.
*/
@Deprecated
int getTagsLength();
/**
* Returns the type of cell in a human-readable format, using {@link DataType}
* @return The data type of this cell: one of Put, Delete, etc.
*/
DataType getType();
/**
* The valid types for users to build the cell. Currently, this is a subset of {@link KeyValue.Type}.
*/
public enum DataType {
Put((byte) 4),
Delete((byte) 8),
DeleteFamilyVersion((byte) 10),
DeleteColumn((byte) 12),
DeleteFamily((byte) 14);
private final byte code;
DataType(final byte c) {
this.code = c;
}
public byte getCode() {
return this.code;
}
}
}
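The new enum pins each public cell type to the byte code of its KeyValue.Type counterpart (Put = 4, Delete = 8, and so on), so getType() can replace comparisons against the deprecated getTypeByte(). A minimal sketch:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Cell.DataType;

public class TypeCheckSketch {
  // Replaces switching on raw getTypeByte() codes.
  public static boolean isAnyDelete(Cell cell) {
    DataType type = cell.getType();
    return type == DataType.Delete
        || type == DataType.DeleteColumn
        || type == DataType.DeleteFamily
        || type == DataType.DeleteFamilyVersion;
  }
}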

View File

@ -26,18 +26,6 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Public
public interface CellBuilder {
/**
* The valid types for user to build the cell.
* Currently, This is subset of {@link KeyValue.Type}.
*/
enum DataType {
Put,
Delete,
DeleteFamilyVersion,
DeleteColumn,
DeleteFamily
}
CellBuilder setRow(final byte[] row);
CellBuilder setRow(final byte[] row, final int rOffset, final int rLength);
@ -49,7 +37,7 @@ public interface CellBuilder {
CellBuilder setTimestamp(final long timestamp);
CellBuilder setType(final DataType type);
CellBuilder setType(final Cell.DataType type);
CellBuilder setValue(final byte[] value);
CellBuilder setValue(final byte[] value, final int vOffset, final int vLength);

View File

@ -564,40 +564,30 @@ public final class CellUtil {
* Note: Now only CPs can create cells with tags, using the CP environment.
* @return A new cell which has the extra tags also added to it.
* @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
* Use CP environment to build Cell using {@link ExtendedCellBuilder}
*
*/
@Deprecated
public static Cell createCell(Cell cell, List<Tag> tags) {
return createCell(cell, Tag.fromList(tags));
return PrivateCellUtil.createCell(cell, tags);
}
/**
* Now only CPs can create cells with tags, using the CP environment.
* @return A new cell which has the extra tags also added to it.
* @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
* Use CP environment to build Cell using {@link ExtendedCellBuilder}
*/
@Deprecated
public static Cell createCell(Cell cell, byte[] tags) {
if (cell instanceof ByteBufferCell) {
return new PrivateCellUtil.TagRewriteByteBufferCell((ByteBufferCell) cell, tags);
}
return new PrivateCellUtil.TagRewriteCell(cell, tags);
return PrivateCellUtil.createCell(cell, tags);
}
/**
* Now only CPs can create cells with tags, using the CP environment.
* @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
* Use CP environment to build Cell using {@link ExtendedCellBuilder}
*/
@Deprecated
public static Cell createCell(Cell cell, byte[] value, byte[] tags) {
if (cell instanceof ByteBufferCell) {
return new PrivateCellUtil.ValueAndTagRewriteByteBufferCell((ByteBufferCell) cell, value,
tags);
}
return new PrivateCellUtil.ValueAndTagRewriteCell(cell, value, tags);
return PrivateCellUtil.createCell(cell, value, tags);
}
/**

View File

@ -131,4 +131,50 @@ public interface ExtendedCell extends RawCell, HeapSize, Cloneable {
* @param ts buffer containing the timestamp value
*/
void setTimestamp(byte[] ts) throws IOException;
/**
* A region-specific unique monotonically increasing sequence ID given to each Cell. It always
* exists for cells in the memstore but is not retained forever. It will be kept for
* {@link HConstants#KEEP_SEQID_PERIOD} days, but generally becomes irrelevant after the cell's
* row is no longer involved in any operations that require strict consistency.
* @return seqId (always &gt; 0 if exists), or 0 if it no longer exists
*/
long getSequenceId();
/**
* Contiguous raw bytes representing tags that may start at any index in the containing array.
* @return the tags byte array
*/
byte[] getTagsArray();
/**
* @return the first offset where the tags start in the Cell
*/
int getTagsOffset();
/**
* HBase internally uses 2 bytes to store tags length in Cell. As the tags length is always a
* non-negative number, to make good use of the sign bit, the max tags length is defined as 2 *
* Short.MAX_VALUE + 1 = 65535. As a result, the return type is int, because a short is not
* capable of handling that. Please note that even if the return type is int, the max tags length
* is far less than Integer.MAX_VALUE.
* @return the total length of the tags in the Cell.
*/
int getTagsLength();
/**
* {@inheritDoc}
* <p>
* Note: This does not expose the internal types of Cells, such as {@link KeyValue.Type#Maximum} and
* {@link KeyValue.Type#Minimum}.
*/
@Override
default DataType getType() {
return PrivateCellUtil.toDataType(getTypeByte());
}
/**
* @return The byte representation of the KeyValue.TYPE of this cell: one of Put, Delete, etc
*/
byte getTypeByte();
}

View File

@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase;
import java.util.List;
import org.apache.yetus.audience.InterfaceAudience;
/**
@ -26,8 +28,8 @@ import org.apache.yetus.audience.InterfaceAudience;
* Use {@link ExtendedCellBuilderFactory} to get ExtendedCellBuilder instance.
* TODO: ditto for ByteBufferCell?
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
public interface ExtendedCellBuilder extends CellBuilder {
@InterfaceAudience.Private
public interface ExtendedCellBuilder extends RawCellBuilder {
@Override
ExtendedCellBuilder setRow(final byte[] row);
@Override
@ -47,7 +49,7 @@ public interface ExtendedCellBuilder extends CellBuilder {
ExtendedCellBuilder setTimestamp(final long timestamp);
@Override
ExtendedCellBuilder setType(final DataType type);
ExtendedCellBuilder setType(final Cell.DataType type);
ExtendedCellBuilder setType(final byte type);
@ -62,11 +64,17 @@ public interface ExtendedCellBuilder extends CellBuilder {
@Override
ExtendedCellBuilder clear();
// TODO : While creating RawCellBuilder allow 'Tag' to be passed instead of byte[]
// We have this method for performance reasons, so that one can create a cell directly from
// the tag byte[] of the cell without having to convert it to a list of Tag(s) and add it
// back.
ExtendedCellBuilder setTags(final byte[] tags);
// TODO : While creating RawCellBuilder allow 'Tag' to be passed instead of byte[]
// We have this method for performance reasons, so that one can create a cell directly from
// the tag byte[] of the cell without having to convert it to a list of Tag(s) and add it
// back.
ExtendedCellBuilder setTags(final byte[] tags, int tagsOffset, int tagsLength);
@Override
ExtendedCellBuilder setTags(List<Tag> tags);
/**
* Internal usage. Be careful before you use this while building a cell
* @param seqId set the seqId

View File

@ -24,25 +24,17 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public final class ExtendedCellBuilderFactory {
public static ExtendedCellBuilder create(CellBuilderType type) {
return create(type, true);
}
/**
* Allows creating a cell with the given CellBuilderType.
* @param type the type of CellBuilder(DEEP_COPY or SHALLOW_COPY).
* @param allowSeqIdUpdate if seqId can be updated. CPs are not allowed to update
* the seqId
* @return the cell that is created
*/
public static ExtendedCellBuilder create(CellBuilderType type, boolean allowSeqIdUpdate) {
public static ExtendedCellBuilder create(CellBuilderType type) {
switch (type) {
case SHALLOW_COPY:
// CPs are not allowed to update seqID and they always use DEEP_COPY. So we are not
// passing 'allowSeqIdUpdate' to IndividualBytesFieldCellBuilder.
return new IndividualBytesFieldCellBuilder();
case DEEP_COPY:
return new KeyValueBuilder(allowSeqIdUpdate);
return new KeyValueBuilder();
default:
throw new UnsupportedOperationException("The type:" + type + " is unsupported");
}
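With the allowSeqIdUpdate flag gone, setSequenceId no longer throws for either builder; coprocessors are instead kept away from seqIds by receiving a RawCellBuilder (see the RegionCoprocessorEnvironment change further down). A sketch of internal usage, assuming build() keeps returning ExtendedCell:

import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.util.Bytes;

public class SeqIdSketch {
  public static ExtendedCell buildWithSeqId() {
    // Internal-only: ExtendedCellBuilder is @InterfaceAudience.Private after this commit.
    return ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
        .setRow(Bytes.toBytes("row"))
        .setFamily(Bytes.toBytes("f"))
        .setQualifier(Bytes.toBytes("q"))
        .setTimestamp(1L)
        .setType(DataType.Put)
        .setValue(Bytes.toBytes("v"))
        .setSequenceId(42L) // previously threw unless allowSeqIdUpdate was true
        .build();
  }
}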

View File

@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase;
import java.util.List;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.yetus.audience.InterfaceAudience;
@ -40,12 +42,6 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
protected byte[] tags = null;
protected int tagsOffset = 0;
protected int tagsLength = 0;
// Will go away once we do with RawCellBuilder
protected boolean allowSeqIdUpdate = false;
public ExtendedCellBuilderImpl(boolean allowSeqIdUpdate) {
this.allowSeqIdUpdate = allowSeqIdUpdate;
}
@Override
public ExtendedCellBuilder setRow(final byte[] row) {
@ -93,8 +89,8 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
}
@Override
public ExtendedCellBuilder setType(final DataType type) {
this.type = toKeyValueType(type);
public ExtendedCellBuilder setType(final Cell.DataType type) {
this.type = PrivateCellUtil.toTypeByte(type);
return this;
}
@ -130,13 +126,16 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
return this;
}
@Override
public ExtendedCellBuilder setTags(List<Tag> tags) {
byte[] tagBytes = TagUtil.fromList(tags);
return setTags(tagBytes);
}
@Override
public ExtendedCellBuilder setSequenceId(final long seqId) {
if (allowSeqIdUpdate) {
this.seqId = seqId;
return this;
}
throw new UnsupportedOperationException("SeqId cannot be set on this cell");
this.seqId = seqId;
return this;
}
private void checkBeforeBuild() {
@ -175,15 +174,4 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
tagsLength = 0;
return this;
}
private static KeyValue.Type toKeyValueType(DataType type) {
switch (type) {
case Put: return KeyValue.Type.Put;
case Delete: return KeyValue.Type.Delete;
case DeleteColumn: return KeyValue.Type.DeleteColumn;
case DeleteFamilyVersion: return KeyValue.Type.DeleteFamilyVersion;
case DeleteFamily: return KeyValue.Type.DeleteFamily;
default: throw new UnsupportedOperationException("Unsupported data type:" + type);
}
}
}

View File

@ -18,6 +18,13 @@
package org.apache.hadoop.hbase;
import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
@ -297,4 +304,30 @@ public class IndividualBytesFieldCell implements ExtendedCell {
public String toString() {
return CellUtil.toString(this, true);
}
@Override
public Optional<Tag> getTag(byte type) {
int length = getTagsLength();
int offset = getTagsOffset();
int pos = offset;
while (pos < offset + length) {
int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE);
if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
return Optional
.ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE));
}
pos += TAG_LENGTH_SIZE + tagLen;
}
return Optional.ofNullable(null);
}
@Override
public List<Tag> getTags() {
List<Tag> tags = new ArrayList<>();
Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
while (tagsItr.hasNext()) {
tags.add(tagsItr.next());
}
return tags;
}
}

View File

@ -22,14 +22,6 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
class IndividualBytesFieldCellBuilder extends ExtendedCellBuilderImpl {
public IndividualBytesFieldCellBuilder() {
this(true);
}
public IndividualBytesFieldCellBuilder(boolean allowSeqIdUpdate) {
super(allowSeqIdUpdate);
}
@Override
public ExtendedCell innerBuild() {
return new IndividualBytesFieldCell(row, rOffset, rLength,

View File

@ -19,6 +19,7 @@
*/
package org.apache.hadoop.hbase;
import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
import static org.apache.hadoop.hbase.util.Bytes.len;
import java.io.DataInput;
@ -29,8 +30,10 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -1521,19 +1524,6 @@ public class KeyValue implements ExtendedCell {
return tagsLen;
}
/**
* Returns any tags embedded in the KeyValue. Used in testcases.
* @return The tags
*/
@Override
public List<Tag> getTags() {
int tagsLength = getTagsLength();
if (tagsLength == 0) {
return EMPTY_ARRAY_LIST;
}
return TagUtil.asList(getTagsArray(), getTagsOffset(), tagsLength);
}
/**
* @return the backing array of the entire KeyValue (all KeyValue fields are in a single array)
*/
@ -2564,4 +2554,30 @@ public class KeyValue implements ExtendedCell {
kv.setSequenceId(this.getSequenceId());
return kv;
}
@Override
public Optional<Tag> getTag(byte type) {
int length = getTagsLength();
int offset = getTagsOffset();
int pos = offset;
while (pos < offset + length) {
int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE);
if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
return Optional
.ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE));
}
pos += TAG_LENGTH_SIZE + tagLen;
}
return Optional.ofNullable(null);
}
@Override
public List<Tag> getTags() {
List<Tag> tags = new ArrayList<>();
Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
while (tagsItr.hasNext()) {
tags.add(tagsItr.next());
}
return tags;
}
}

View File

@ -22,14 +22,6 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
class KeyValueBuilder extends ExtendedCellBuilderImpl {
KeyValueBuilder() {
this(true);
}
KeyValueBuilder(boolean allowSeqIdUpdate) {
super(allowSeqIdUpdate);
}
@Override
protected ExtendedCell innerBuild() {
KeyValue kv = new KeyValue(row, rOffset, rLength,

View File

@ -21,6 +21,7 @@ import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
import com.google.common.annotations.VisibleForTesting;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
@ -31,6 +32,7 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.io.HeapSize;
@ -43,6 +45,7 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.yetus.audience.InterfaceAudience;
/**
* Utility methods helpful for slinging {@link Cell} instances. It has a more powerful and
* richer set of APIs than those in {@link CellUtil}, for internal usage.
@ -107,7 +110,7 @@ public final class PrivateCellUtil {
* @return A new cell which has the extra tags also added to it.
*/
public static Cell createCell(Cell cell, List<Tag> tags) {
return createCell(cell, Tag.fromList(tags));
return createCell(cell, TagUtil.fromList(tags));
}
/**
@ -311,6 +314,32 @@ public final class PrivateCellUtil {
Cell clonedBaseCell = ((ExtendedCell) this.cell).deepClone();
return new TagRewriteCell(clonedBaseCell, this.tags);
}
@Override
public Optional<Tag> getTag(byte type) {
int length = getTagsLength();
int offset = getTagsOffset();
int pos = offset;
while (pos < offset + length) {
int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE);
if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
return Optional
.ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE));
}
pos += TAG_LENGTH_SIZE + tagLen;
}
return Optional.ofNullable(null);
}
@Override
public List<Tag> getTags() {
List<Tag> tags = new ArrayList<>();
Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
while (tagsItr.hasNext()) {
tags.add(tagsItr.next());
}
return tags;
}
}
static class TagRewriteByteBufferCell extends ByteBufferCell implements ExtendedCell {
@ -544,6 +573,33 @@ public final class PrivateCellUtil {
public int getTagsPosition() {
return 0;
}
@Override
public Optional<Tag> getTag(byte type) {
int length = getTagsLength();
int offset = getTagsPosition();
int pos = offset;
int tagLen;
while (pos < offset + length) {
ByteBuffer tagsBuffer = getTagsByteBuffer();
tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE);
if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) {
return Optional.ofNullable(new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE));
}
pos += TAG_LENGTH_SIZE + tagLen;
}
return Optional.ofNullable(null);
}
@Override
public List<Tag> getTags() {
List<Tag> tags = new ArrayList<>();
Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
while (tagsItr.hasNext()) {
tags.add(tagsItr.next());
}
return tags;
}
}
static class ValueAndTagRewriteCell extends TagRewriteCell {
@ -928,7 +984,7 @@ public final class PrivateCellUtil {
return CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
}
private static Iterator<Tag> tagsIterator(final ByteBuffer tags, final int offset,
public static Iterator<Tag> tagsIterator(final ByteBuffer tags, final int offset,
final int length) {
return new Iterator<Tag>() {
private int pos = offset;
@ -1231,6 +1287,29 @@ public final class PrivateCellUtil {
cell.getQualifierLength());
}
public static Cell.DataType toDataType(byte type) {
Type codeToType = KeyValue.Type.codeToType(type);
switch (codeToType) {
case Put: return Cell.DataType.Put;
case Delete: return Cell.DataType.Delete;
case DeleteColumn: return Cell.DataType.DeleteColumn;
case DeleteFamily: return Cell.DataType.DeleteFamily;
case DeleteFamilyVersion: return Cell.DataType.DeleteFamilyVersion;
default: throw new UnsupportedOperationException("Invalid type of cell " + type);
}
}
public static KeyValue.Type toTypeByte(Cell.DataType type) {
switch (type) {
case Put: return KeyValue.Type.Put;
case Delete: return KeyValue.Type.Delete;
case DeleteColumn: return KeyValue.Type.DeleteColumn;
case DeleteFamilyVersion: return KeyValue.Type.DeleteFamilyVersion;
case DeleteFamily: return KeyValue.Type.DeleteFamily;
default: throw new UnsupportedOperationException("Unsupported data type:" + type);
}
}
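The two converters are inverses over the five public types; internal KeyValue types such as Maximum and Minimum deliberately have no DataType mapping and land in the throwing default case. A quick sketch (PrivateCellUtil is internal, so this is illustrative only; run with -ea for the asserts):

import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.PrivateCellUtil;

public class TypeConversionSketch {
  public static void main(String[] args) {
    // Round trip: DataType -> KeyValue.Type -> byte code -> DataType.
    byte code = PrivateCellUtil.toTypeByte(DataType.Put).getCode();
    assert code == KeyValue.Type.Put.getCode(); // both are 4
    assert PrivateCellUtil.toDataType(code) == DataType.Put;

    try {
      // Internal-only type bytes are rejected rather than exposed.
      PrivateCellUtil.toDataType(KeyValue.Type.Maximum.getCode());
    } catch (UnsupportedOperationException expected) {
      // expected: Maximum has no public DataType equivalent
    }
  }
}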
/**
* Compare cell's value against given comparator
* @param cell
@ -1345,6 +1424,32 @@ public final class PrivateCellUtil {
public int getTagsLength() {
return 0;
}
@Override
public Optional<Tag> getTag(byte type) {
int length = getTagsLength();
int offset = getTagsOffset();
int pos = offset;
while (pos < offset + length) {
int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE);
if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
return Optional
.ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE));
}
pos += TAG_LENGTH_SIZE + tagLen;
}
return Optional.ofNullable(null);
}
@Override
public List<Tag> getTags() {
List<Tag> tags = new ArrayList<>();
Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
while (tagsItr.hasNext()) {
tags.add(tagsItr.next());
}
return tags;
}
}
/**
@ -1498,6 +1603,33 @@ public final class PrivateCellUtil {
public int getValuePosition() {
return 0;
}
@Override
public Optional<Tag> getTag(byte type) {
int length = getTagsLength();
int offset = getTagsPosition();
int pos = offset;
int tagLen;
while (pos < offset + length) {
ByteBuffer tagsBuffer = getTagsByteBuffer();
tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE);
if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) {
return Optional.ofNullable(new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE));
}
pos += TAG_LENGTH_SIZE + tagLen;
}
return Optional.ofNullable(null);
}
@Override
public List<Tag> getTags() {
List<Tag> tags = new ArrayList<>();
Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
while (tagsItr.hasNext()) {
tags.add(tagsItr.next());
}
return tags;
}
}
private static class FirstOnRowCell extends EmptyCell {
@ -1547,6 +1679,11 @@ public final class PrivateCellUtil {
public byte getTypeByte() {
return Type.Maximum.getCode();
}
@Override
public DataType getType() {
throw new UnsupportedOperationException();
}
}
private static class FirstOnRowByteBufferCell extends EmptyByteBufferCell {
@ -1597,6 +1734,11 @@ public final class PrivateCellUtil {
public byte getTypeByte() {
return Type.Maximum.getCode();
}
@Override
public DataType getType() {
throw new UnsupportedOperationException();
}
}
private static class LastOnRowByteBufferCell extends EmptyByteBufferCell {
@ -1647,6 +1789,11 @@ public final class PrivateCellUtil {
public byte getTypeByte() {
return Type.Minimum.getCode();
}
@Override
public DataType getType() {
throw new UnsupportedOperationException();
}
}
private static class FirstOnRowColByteBufferCell extends FirstOnRowByteBufferCell {
@ -1875,6 +2022,11 @@ public final class PrivateCellUtil {
public byte getTypeByte() {
return Type.Minimum.getCode();
}
@Override
public DataType getType() {
throw new UnsupportedOperationException();
}
}
private static class LastOnRowColCell extends LastOnRowCell {
@ -2060,6 +2212,11 @@ public final class PrivateCellUtil {
public byte getTypeByte() {
return Type.DeleteFamily.getCode();
}
@Override
public DataType getType() {
return DataType.DeleteFamily;
}
}
/**
@ -2890,5 +3047,4 @@ public final class PrivateCellUtil {
public static Cell createFirstDeleteFamilyCellOnRow(final byte[] row, final byte[] fam) {
return new FirstOnRowDeleteFamilyCell(row, fam);
}
}

View File

@ -41,19 +41,14 @@ public interface RawCell extends Cell {
* Creates a list of tags in the current cell
* @return a list of tags
*/
default List<Tag> getTags() {
return PrivateCellUtil.getTags(this);
}
List<Tag> getTags();
/**
* Returns the specific tag of the given type
* @param type the type of the tag
* @return the specific tag if available, otherwise an empty Optional
*/
// TODO : Move to individual cell impl
default Optional<Tag> getTag(byte type) {
return PrivateCellUtil.getTag(this, type);
}
Optional<Tag> getTag(byte type);
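With the defaults removed, each cell implementation supplies its own tag scan, but callers keep the same Optional-returning contract. A small consumer sketch; TagType.MOB_REF_TAG_TYPE is assumed from the existing MOB code paths, and any tag type byte works:

import java.util.Optional;
import org.apache.hadoop.hbase.RawCell;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;

public class TagLookupSketch {
  // True if the cell carries a MOB reference tag.
  public static boolean hasMobRefTag(RawCell cell) {
    Optional<Tag> tag = cell.getTag(TagType.MOB_REF_TAG_TYPE);
    return tag.isPresent();
  }
}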
/**
* Check the length of tags. If it is invalid, throw IllegalArgumentException

View File

@ -20,7 +20,6 @@
package org.apache.hadoop.hbase;
import java.nio.ByteBuffer;
import java.util.List;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
@ -152,38 +151,6 @@ public interface Tag {
}
}
/**
* Write a list of tags into a byte array
* @param tags The list of tags
* @return the serialized tag data as bytes
*/
// TODO : Remove this when we move to RawCellBuilder
public static byte[] fromList(List<Tag> tags) {
if (tags == null || tags.isEmpty()) {
return HConstants.EMPTY_BYTE_ARRAY;
}
int length = 0;
for (Tag tag : tags) {
length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
}
byte[] b = new byte[length];
int pos = 0;
int tlen;
for (Tag tag : tags) {
tlen = tag.getValueLength();
pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE);
pos = Bytes.putByte(b, pos, tag.getType());
if (tag.hasArray()) {
pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen);
} else {
ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(),
pos, tlen);
pos += tlen;
}
}
return b;
}
/**
* Converts the value bytes of the given tag into a long value
* @param tag The Tag

View File

@ -136,6 +136,41 @@ public final class TagUtil {
return tags;
}
/**
* Write a list of tags into a byte array.
* Note: these are all purely internal APIs. They help in
* cases where we have a set of tags and want to create a cell out of them. For example, in MOBs
* we create a reference tag to indicate the presence of mob data. Also note that these are not
* exposed to CPs.
* @param tags The list of tags
* @return the serialized tag data as bytes
*/
public static byte[] fromList(List<Tag> tags) {
if (tags == null || tags.isEmpty()) {
return HConstants.EMPTY_BYTE_ARRAY;
}
int length = 0;
for (Tag tag : tags) {
length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE;
}
byte[] b = new byte[length];
int pos = 0;
int tlen;
for (Tag tag : tags) {
tlen = tag.getValueLength();
pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE);
pos = Bytes.putByte(b, pos, tag.getType());
if (tag.hasArray()) {
pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen);
} else {
ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(),
pos, tlen);
pos += tlen;
}
}
return b;
}
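The serialized layout per tag is a 2-byte length (covering the type byte plus the value), then a 1-byte type, then the value bytes; this is the same layout the getTag(byte) implementations above walk with TAG_LENGTH_SIZE offsets. A round-trip sketch, assuming TagUtil.asList keeps its existing signature as the inverse (run with -ea for the assert):

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class TagSerializationSketch {
  public static void main(String[] args) {
    byte type = (byte) 77; // hypothetical tag type, for illustration only
    List<Tag> tags = Arrays.asList(new ArrayBackedTag(type, Bytes.toBytes("v1")));

    // Per tag: [2-byte length = value length + TYPE_LENGTH_SIZE][1-byte type][value]
    byte[] serialized = TagUtil.fromList(tags);

    List<Tag> roundTrip = TagUtil.asList(serialized, 0, serialized.length);
    assert roundTrip.size() == 1 && roundTrip.get(0).getType() == type;
  }
}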
/**
* Iterator returned when no Tags. Used by CellUtil too.
*/

View File

@ -21,8 +21,14 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.ByteBufferCell;
import org.apache.hadoop.hbase.ByteBufferTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
@ -32,6 +38,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.io.TagCompressionContext;
import org.apache.hadoop.hbase.io.util.LRUDictionary;
import org.apache.hadoop.hbase.io.util.StreamUtils;
@ -475,6 +482,32 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
// This is not used in actual flow. Throwing UnsupportedOperationException
throw new UnsupportedOperationException();
}
@Override
public Optional<Tag> getTag(byte type) {
int length = getTagsLength();
int offset = getTagsOffset();
int pos = offset;
while (pos < offset + length) {
int tagLen = Bytes.readAsInt(getTagsArray(), pos, Tag.TAG_LENGTH_SIZE);
if (getTagsArray()[pos + Tag.TAG_LENGTH_SIZE] == type) {
return Optional
.ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + Tag.TAG_LENGTH_SIZE));
}
pos += Tag.TAG_LENGTH_SIZE + tagLen;
}
return Optional.ofNullable(null);
}
@Override
public List<Tag> getTags() {
List<Tag> tags = new ArrayList<>();
Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
while (tagsItr.hasNext()) {
tags.add(tagsItr.next());
}
return tags;
}
}
protected static class OffheapDecodedCell extends ByteBufferCell implements ExtendedCell {
@ -720,6 +753,35 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder {
// This is not used in actual flow. Throwing UnsupportedOperationException
throw new UnsupportedOperationException();
}
@Override
public Optional<Tag> getTag(byte type) {
int length = getTagsLength();
int offset = getTagsPosition();
int pos = offset;
int tagLen;
while (pos < offset + length) {
ByteBuffer tagsBuffer = getTagsByteBuffer();
tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, Tag.TAG_LENGTH_SIZE);
if (ByteBufferUtils.toByte(tagsBuffer, pos + Tag.TAG_LENGTH_SIZE) == type) {
return Optional
.ofNullable(new ByteBufferTag(tagsBuffer, pos, tagLen + Tag.TAG_LENGTH_SIZE));
}
pos += Tag.TAG_LENGTH_SIZE + tagLen;
}
return Optional.ofNullable(null);
}
@Override
public List<Tag> getTags() {
List<Tag> tags = new ArrayList<>();
Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
while (tagsItr.hasNext()) {
tags.add(tagsItr.next());
}
return tags;
}
}
protected abstract static class BufferedEncodedSeeker<STATE extends SeekerState>

View File

@ -41,7 +41,7 @@ public class TestCellBuilder {
.setRow(row)
.setFamily(family)
.setQualifier(qualifier)
.setType(CellBuilder.DataType.Put)
.setType(Cell.DataType.Put)
.setValue(value)
.build();
row[0] = NEW_DATA;
@ -64,7 +64,7 @@ public class TestCellBuilder {
.setRow(row)
.setFamily(family)
.setQualifier(qualifier)
.setType(CellBuilder.DataType.Put)
.setType(Cell.DataType.Put)
.setValue(value)
.build();
row[0] = NEW_DATA;

View File

@ -198,6 +198,11 @@ public class TestCellUtil {
// TODO Auto-generated method stub
return 0;
}
@Override
public DataType getType() {
return PrivateCellUtil.toDataType(getTypeByte());
}
};
/**
@ -613,5 +618,10 @@ public class TestCellUtil {
public int getTagsLength() {
return this.kv.getTagsLength();
}
@Override
public DataType getType() {
return PrivateCellUtil.toDataType(getTypeByte());
}
}
}

View File

@ -737,5 +737,10 @@ public class TestKeyValue extends TestCase {
public byte[] getTagsArray() {
return this.kv.getTagsArray();
}
@Override
public DataType getType() {
return PrivateCellUtil.toDataType(getTypeByte());
}
}
}

View File

@ -19,10 +19,22 @@
package org.apache.hadoop.hbase.client.example;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.TableName;
@ -39,18 +51,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
/**
* Example on how to use HBase's {@link Connection} and {@link Table} in a
@ -226,7 +226,7 @@ public class MultiThreadedClientExample extends Configured implements Tool {
.setFamily(FAMILY)
.setQualifier(QUAL)
.setTimestamp(p.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(value)
.build());
puts.add(p);
@ -263,7 +263,7 @@ public class MultiThreadedClientExample extends Configured implements Tool {
.setFamily(FAMILY)
.setQualifier(QUAL)
.setTimestamp(p.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(value)
.build());
t.put(p);

View File

@ -22,7 +22,6 @@ import java.util.Optional;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilder.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
@ -89,7 +88,7 @@ public class ValueRewritingObserver implements RegionObserver, RegionCoprocessor
cellBuilder.setFamily(CellUtil.cloneFamily(c));
cellBuilder.setQualifier(CellUtil.cloneQualifier(c));
cellBuilder.setTimestamp(c.getTimestamp());
cellBuilder.setType(DataType.Put);
cellBuilder.setType(Cell.DataType.Put);
// Make sure each cell gets a unique value
byte[] clonedValue = new byte[replacedValue.length];
System.arraycopy(replacedValue, 0, clonedValue, 0, replacedValue.length);

View File

@ -29,7 +29,6 @@ import java.util.stream.IntStream;
import org.apache.commons.lang3.mutable.MutableLong;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
@ -80,7 +79,7 @@ public class WriteHeavyIncrementObserver implements RegionCoprocessor, RegionObs
private Cell createCell(byte[] row, byte[] family, byte[] qualifier, long ts, long value) {
return CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(row)
.setType(CellBuilder.DataType.Put).setFamily(family).setQualifier(qualifier)
.setType(Cell.DataType.Put).setFamily(family).setQualifier(qualifier)
.setTimestamp(ts).setValue(Bytes.toBytes(value)).build();
}
@ -250,7 +249,7 @@ public class WriteHeavyIncrementObserver implements RegionCoprocessor, RegionObs
.setQualifier(cell.getQualifierArray(), cell.getQualifierOffset(),
cell.getQualifierLength())
.setValue(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())
.setType(CellBuilder.DataType.Put).setTimestamp(ts).build());
.setType(Cell.DataType.Put).setTimestamp(ts).build());
}
}
c.getEnvironment().getRegion().put(put);

View File

@ -46,7 +46,7 @@ public class TestPBCell {
@Test
public void testRoundTrip() {
final Cell cell = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"),
Bytes.toBytes("qual"), Bytes.toBytes("val"));
Bytes.toBytes("qual"), Bytes.toBytes("val"));
CellProtos.Cell c = ProtobufUtil.toCell(cell), decoded;
PositionedByteRange pbr = new SimplePositionedByteRange(c.getSerializedSize());
pbr.setPosition(0);
@ -54,6 +54,7 @@ public class TestPBCell {
pbr.setPosition(0);
decoded = CODEC.decode(pbr);
assertEquals(encodedLength, pbr.getPosition());
assertTrue(CellUtil.equals(cell, ProtobufUtil.toCell(ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), decoded)));
assertTrue(CellUtil.equals(cell, ProtobufUtil
.toCell(ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), decoded)));
}
}

View File

@ -17,15 +17,23 @@
*/
package org.apache.hadoop.hbase.util;
import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.ByteBufferCell;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.ExtendedCell;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.yetus.audience.InterfaceAudience;
/**
@ -268,4 +276,30 @@ public class MapReduceCell extends ByteBufferCell implements ExtendedCell {
throw new RuntimeException(e);
}
}
@Override
public Optional<Tag> getTag(byte type) {
int length = getTagsLength();
int offset = getTagsOffset();
int pos = offset;
while (pos < offset + length) {
int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE);
if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
return Optional
.ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE));
}
pos += TAG_LENGTH_SIZE + tagLen;
}
return Optional.ofNullable(null);
}
@Override
public List<Tag> getTags() {
List<Tag> tags = new ArrayList<>();
Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this);
while (tagsItr.hasNext()) {
tags.add(tagsItr.next());
}
return tags;
}
}

View File

@ -39,11 +39,10 @@ import javax.ws.rs.core.UriInfo;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Append;
@ -56,6 +55,7 @@ import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class RowResource extends ResourceBase {
@ -246,7 +246,7 @@ public class RowResource extends ResourceBase {
.setFamily(parts[0])
.setQualifier(parts[1])
.setTimestamp(cell.getTimestamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(cell.getValue())
.build());
}
@ -321,7 +321,7 @@ public class RowResource extends ResourceBase {
.setFamily(parts[0])
.setQualifier(parts[1])
.setTimestamp(timestamp)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(message)
.build());
table = servlet.getTable(tableResource.getName());
@ -518,7 +518,7 @@ public class RowResource extends ResourceBase {
.setFamily(parts[0])
.setQualifier(parts[1])
.setTimestamp(cell.getTimestamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(cell.getValue())
.build());
if(Bytes.equals(col,

View File

@ -24,8 +24,8 @@ import java.util.concurrent.ConcurrentMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.ExtendedCellBuilder;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.RawCellBuilder;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.RegionInfo;
@ -127,7 +127,7 @@ public interface RegionCoprocessorEnvironment extends CoprocessorEnvironment<Reg
/**
* Returns a CellBuilder so that coprocessors can build cells. These cells can also include tags.
* Note that this builder does not support updating the seqId of the cells.
* @return the ExtendedCellBuilder
* @return the RawCellBuilder
*/
ExtendedCellBuilder getCellBuilder();
RawCellBuilder getCellBuilder();
}
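Coprocessors now receive a RawCellBuilder, which (judging by the RawCellBuilder/ExtendedCellBuilder split in this commit, including the setTags(List&lt;Tag&gt;) @Override above) exposes the CellBuilder fluent setters plus List-based tags, but no setSequenceId. A hedged sketch of a CP building a tagged cell, assuming the fluent API mirrors CellBuilder:

import java.util.Collections;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.util.Bytes;

public class CoprocessorCellSketch {
  public static Cell buildTaggedCell(RegionCoprocessorEnvironment env) {
    Tag tag = new ArrayBackedTag((byte) 77, Bytes.toBytes("cp-tag")); // hypothetical tag type
    return env.getCellBuilder()
        .setRow(Bytes.toBytes("row"))
        .setFamily(Bytes.toBytes("f"))
        .setQualifier(Bytes.toBytes("q"))
        .setTimestamp(System.currentTimeMillis())
        .setType(DataType.Put)
        .setValue(Bytes.toBytes("value"))
        .setTags(Collections.singletonList(tag))
        .build();
  }
}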

View File

@ -35,7 +35,7 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.HBaseIOException;
@ -52,7 +52,6 @@ import org.apache.hadoop.hbase.master.RackManager;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@ -184,7 +183,7 @@ public class FavoredNodeAssignmentHelper {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(FAVOREDNODES_QUALIFIER)
.setTimestamp(EnvironmentEdgeManager.currentTime())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(favoredNodes)
.build());
LOG.debug("Create the region " + regionInfo.getRegionNameAsString() +

View File

@ -26,37 +26,35 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZKNamespaceManager;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.constraint.ConstraintException;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.yetus.audience.InterfaceAudience;
/**
* This is a helper class used internally to manage the namespace metadata that is stored in
@ -160,7 +158,7 @@ public class TableNamespaceManager {
.setFamily(TableDescriptorBuilder.NAMESPACE_FAMILY_INFO_BYTES)
.setQualifier(TableDescriptorBuilder.NAMESPACE_COL_DESC_BYTES)
.setTimestamp(p.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(ProtobufUtil.toProtoNamespaceDescriptor(ns).toByteArray())
.build());
nsTable.put(p);

View File

@ -23,10 +23,11 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.HConstants;
@ -51,7 +52,6 @@ import org.apache.zookeeper.KeeperException;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/**
* Store Region State to hbase:meta table.
*/
@ -185,7 +185,7 @@ public class RegionStateStore {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(getServerNameColumn(replicaId))
.setTimestamp(put.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(Bytes.toBytes(regionLocation.getServerName()))
.build());
info.append(", regionLocation=").append(regionLocation);
@ -195,7 +195,7 @@ public class RegionStateStore {
.setFamily(HConstants.CATALOG_FAMILY)
.setQualifier(getStateColumn(replicaId))
.setTimestamp(put.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(Bytes.toBytes(state.name()))
.build());
LOG.info(info);

View File

@ -102,7 +102,7 @@ public final class MobUtils {
static {
List<Tag> tags = new ArrayList<>();
tags.add(MobConstants.MOB_REF_TAG);
REF_DELETE_MARKER_TAG_BYTES = Tag.fromList(tags);
REF_DELETE_MARKER_TAG_BYTES = TagUtil.fromList(tags);
}
/**
@ -502,7 +502,7 @@ public final class MobUtils {
// find the original mob files by this table name. For details please see cloning
// snapshot for mob files.
tags.add(tableNameTag);
return createMobRefCell(cell, fileName, Tag.fromList(tags));
return createMobRefCell(cell, fileName, TagUtil.fromList(tags));
}
public static Cell createMobRefCell(Cell cell, byte[] fileName, byte[] refCellTags) {

View File

@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@ -131,7 +132,7 @@ public class PartitionedMobCompactor extends MobCompactor {
tags.add(MobConstants.MOB_REF_TAG);
Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE, tableName.getName());
tags.add(tableNameTag);
this.refCellTags = Tag.fromList(tags);
this.refCellTags = TagUtil.fromList(tags);
cryptoContext = EncryptionUtil.createEncryptionContext(copyOfConf, column);
}

View File

@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
@ -120,7 +121,7 @@ public class HMobStore extends HStore {
Tag tableNameTag = new ArrayBackedTag(TagType.MOB_TABLE_NAME_TAG_TYPE,
getTableName().getName());
tags.add(tableNameTag);
this.refCellTags = Tag.fromList(tags);
this.refCellTags = TagUtil.fromList(tags);
}
/**


@ -7761,7 +7761,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
.setTimestamp(Math.max(currentCell.getTimestamp() + 1, now))
.setType(KeyValue.Type.Put.getCode())
.setValue(newValue, 0, newValue.length)
.setTags(Tag.fromList(tags))
.setTags(TagUtil.fromList(tags))
.build();
} else {
PrivateCellUtil.updateLatestStamp(delta, now);
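
The server-side increment/append path keeps using the extended builder, which accepts the raw KeyValue type code and serialized tags; only the tag serialization call changes. A sketch of the pattern, not the exact HRegion code, assuming the extended builder's byte-oriented setters as shown in the hunk above:

import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;

public class IncrementCellSketch {
  public static Cell resultCell(byte[] row, byte[] family, byte[] qualifier, long ts,
      byte[] newValue, List<Tag> tags) {
    return ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
        .setRow(row)
        .setFamily(family)
        .setQualifier(qualifier)
        .setTimestamp(ts)
        .setType(KeyValue.Type.Put.getCode()) // raw type byte on the extended builder
        .setValue(newValue, 0, newValue.length)
        .setTags(TagUtil.fromList(tags)) // previously Tag.fromList(tags)
        .build();
  }
}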


@ -38,13 +38,12 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.ExtendedCellBuilder;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.RawCellBuilder;
import org.apache.hadoop.hbase.RawCellBuilderFactory;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.SharedConnection;
import org.apache.hadoop.hbase.client.Append;
@ -184,10 +183,9 @@ public class RegionCoprocessorHost
}
@Override
public ExtendedCellBuilder getCellBuilder() {
// do not allow seqId update.
public RawCellBuilder getCellBuilder() {
// We always do a DEEP_COPY only
return ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY, false);
return RawCellBuilderFactory.create();
}
}
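
With this change a coprocessor no longer sees ExtendedCellBuilder (and so cannot touch internals such as seqId); its environment hands out a RawCellBuilder that always deep-copies. A sketch of building a cell from inside a coprocessor, assuming the environment exposes getCellBuilder() as the override above indicates, with hypothetical coordinates:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.util.Bytes;

public class CoprocessorCellSketch {
  public static Cell buildMarker(RegionCoprocessorEnvironment env) {
    return env.getCellBuilder() // a RawCellBuilder; always a deep copy
        .setRow(Bytes.toBytes("row")) // hypothetical coordinates, for illustration only
        .setFamily(Bytes.toBytes("f"))
        .setQualifier(Bytes.toBytes("q"))
        .setTimestamp(System.currentTimeMillis())
        .setType(DataType.Put)
        .setValue(Bytes.toBytes("v"))
        .build();
  }
}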


@ -37,7 +37,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
@ -173,7 +173,7 @@ public class AccessControlLists {
.setFamily(ACL_LIST_FAMILY)
.setQualifier(key)
.setTimestamp(p.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(value)
.build());
if (LOG.isDebugEnabled()) {


@ -46,7 +46,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
@ -57,13 +57,11 @@ import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.util.StreamUtils;
@ -74,6 +72,8 @@ import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService {
@ -218,7 +218,7 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
.setFamily(LABELS_TABLE_FAMILY)
.setQualifier(LABEL_QUALIFIER)
.setTimestamp(p.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(Bytes.toBytes(SYSTEM_LABEL))
.build());
region.put(p);
@ -246,9 +246,9 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
.setFamily(LABELS_TABLE_FAMILY)
.setQualifier(LABEL_QUALIFIER)
.setTimestamp(p.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(label)
.setTags(Tag.fromList(Arrays.asList(LABELS_TABLE_TAGS)))
.setTags(TagUtil.fromList(Arrays.asList(LABELS_TABLE_TAGS)))
.build());
if (LOG.isDebugEnabled()) {
LOG.debug("Adding the label " + labelStr);
@ -286,9 +286,9 @@ public class DefaultVisibilityLabelServiceImpl implements VisibilityLabelService
.setFamily(LABELS_TABLE_FAMILY)
.setQualifier(user)
.setTimestamp(p.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(DUMMY_VALUE)
.setTags(Tag.fromList(Arrays.asList(LABELS_TABLE_TAGS)))
.setTags(TagUtil.fromList(Arrays.asList(LABELS_TABLE_TAGS)))
.build());
puts.add(p);
}


@ -28,17 +28,16 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.replication.WALEntryFilter;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class VisibilityReplicationEndpoint implements ReplicationEndpoint {


@ -18,7 +18,13 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.CellBuilder;
import static junit.framework.TestCase.assertEquals;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CompatibilityFactory;
@ -43,12 +49,6 @@ import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import static junit.framework.TestCase.assertEquals;
/**
* This test sets the multi size WAAAAAY low and then checks to make sure that gets will still make
* progress.
@ -157,7 +157,7 @@ public class TestMultiRespectsLimits {
.setFamily(FAMILY)
.setQualifier(col)
.setTimestamp(p.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(value)
.build());
t.put(p);


@ -26,12 +26,10 @@ import java.util.TreeMap;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@ -192,7 +190,7 @@ public class MockHStoreFile extends HStoreFile {
public Optional<Cell> getLastKey() {
if (splitPoint != null) {
return Optional.of(CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
.setType(CellBuilder.DataType.Put)
.setType(Cell.DataType.Put)
.setRow(Arrays.copyOf(splitPoint, splitPoint.length + 1)).build());
} else {
return Optional.empty();
@ -203,7 +201,7 @@ public class MockHStoreFile extends HStoreFile {
public Optional<Cell> midKey() throws IOException {
if (splitPoint != null) {
return Optional.of(CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
.setType(CellBuilder.DataType.Put).setRow(splitPoint).build());
.setType(Cell.DataType.Put).setRow(splitPoint).build());
} else {
return Optional.empty();
}
@ -213,7 +211,7 @@ public class MockHStoreFile extends HStoreFile {
public Optional<Cell> getFirstKey() {
if (splitPoint != null) {
return Optional.of(CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
.setType(CellBuilder.DataType.Put).setRow(splitPoint, 0, splitPoint.length - 1)
.setType(Cell.DataType.Put).setRow(splitPoint, 0, splitPoint.length - 1)
.build());
} else {
return Optional.empty();


@ -28,7 +28,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@ -154,7 +154,7 @@ public class TestCompactionLifeCycleTracker {
.setFamily(CF1)
.setQualifier(QUALIFIER)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(Bytes.toBytes(i))
.build()));
}
@ -167,7 +167,7 @@ public class TestCompactionLifeCycleTracker {
.setFamily(CF1)
.setQualifier(QUALIFIER)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(Bytes.toBytes(i))
.build()));
}


@ -28,7 +28,7 @@ import java.io.InterruptedIOException;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.HBaseTestingUtility;
@ -200,7 +200,7 @@ public class TestFlushLifeCycleTracker {
.setFamily(CF)
.setQualifier(QUALIFIER)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(Bytes.toBytes(i))
.build()));
}
@ -234,7 +234,7 @@ public class TestFlushLifeCycleTracker {
.setFamily(CF)
.setQualifier(QUALIFIER)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(Bytes.toBytes(i))
.build()));
}


@ -29,9 +29,9 @@ import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyLong;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
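
The test also moves off the deprecated org.mockito.Matchers aliases onto org.mockito.ArgumentMatchers, their Mockito 2 home; call sites are untouched, only the static-import package changes. A tiny illustration with a hypothetical mock:

import static org.mockito.ArgumentMatchers.anyInt; // was org.mockito.Matchers.anyInt
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.List;

public class MatchersMigrationSketch {
  @SuppressWarnings("unchecked")
  public static String example() {
    // Hypothetical mock, for illustration only; the matcher behaves exactly as before.
    List<String> mockList = (List<String>) mock(List.class);
    when(mockList.get(anyInt())).thenReturn("x");
    return mockList.get(0);
  }
}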
@ -74,7 +74,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
@ -89,7 +89,6 @@ import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.MiniHBaseCluster;
@ -97,6 +96,7 @@ import org.apache.hadoop.hbase.MultithreadedTestUtil;
import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.RegionTooBusyException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
@ -158,7 +158,6 @@ import org.apache.hadoop.hbase.wal.FaultyFSLog;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.hadoop.hbase.wal.WALProvider;
import org.apache.hadoop.hbase.wal.WALProvider.Writer;
@ -6282,20 +6281,20 @@ public class TestHRegion {
.setRow(a)
.setFamily(fam1)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.build()),
// this is outside the region boundary
new Put(c).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
.setRow(c)
.setFamily(fam1)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.build()),
new Put(b).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
.setRow(b)
.setFamily(fam1)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.build())
};
@ -6331,13 +6330,13 @@ public class TestHRegion {
.setRow(a)
.setFamily(fam1)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.build()),
new Put(b).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
.setRow(b)
.setFamily(fam1)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.build()),
};


@ -59,7 +59,6 @@ import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellComparator;
@ -69,9 +68,9 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MemoryCompactionPolicy;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@ -1049,13 +1048,13 @@ public class TestHStore {
long seqId = 100;
long timestamp = System.currentTimeMillis();
Cell cell0 = CellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(row).setFamily(family)
.setQualifier(qf1).setTimestamp(timestamp).setType(CellBuilder.DataType.Put)
.setQualifier(qf1).setTimestamp(timestamp).setType(Cell.DataType.Put)
.setValue(qf1).build();
PrivateCellUtil.setSequenceId(cell0, seqId);
testNumberOfMemStoreScannersAfterFlush(Arrays.asList(cell0), Collections.emptyList());
Cell cell1 = CellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(row).setFamily(family)
.setQualifier(qf2).setTimestamp(timestamp).setType(CellBuilder.DataType.Put)
.setQualifier(qf2).setTimestamp(timestamp).setType(Cell.DataType.Put)
.setValue(qf1).build();
PrivateCellUtil.setSequenceId(cell1, seqId);
testNumberOfMemStoreScannersAfterFlush(Arrays.asList(cell0), Arrays.asList(cell1));
@ -1063,7 +1062,7 @@ public class TestHStore {
seqId = 101;
timestamp = System.currentTimeMillis();
Cell cell2 = CellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(row2).setFamily(family)
.setQualifier(qf2).setTimestamp(timestamp).setType(CellBuilder.DataType.Put)
.setQualifier(qf2).setTimestamp(timestamp).setType(Cell.DataType.Put)
.setValue(qf1).build();
PrivateCellUtil.setSequenceId(cell2, seqId);
testNumberOfMemStoreScannersAfterFlush(Arrays.asList(cell0), Arrays.asList(cell1, cell2));
@ -1118,7 +1117,7 @@ public class TestHStore {
private Cell createCell(byte[] row, byte[] qualifier, long ts, long sequenceId, byte[] value)
throws IOException {
Cell c = CellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(row).setFamily(family)
.setQualifier(qualifier).setTimestamp(ts).setType(CellBuilder.DataType.Put)
.setQualifier(qualifier).setTimestamp(ts).setType(Cell.DataType.Put)
.setValue(value).build();
PrivateCellUtil.setSequenceId(c, sequenceId);
return c;


@ -39,9 +39,9 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ClusterId;
import org.apache.hadoop.hbase.CoordinatedStateManager;
import org.apache.hadoop.hbase.ExtendedCellBuilder;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.RawCellBuilder;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.ClusterConnection;
@ -331,7 +331,7 @@ public class TestTokenAuthentication {
}
@Override
public ExtendedCellBuilder getCellBuilder() {
public RawCellBuilder getCellBuilder() {
return null;
}
});


@ -38,6 +38,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
@ -111,7 +112,7 @@ public class ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer
.setFamily(LABELS_TABLE_FAMILY)
.setQualifier(auth)
.setTimestamp(p.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(DUMMY_VALUE)
.build());
}


@ -38,10 +38,12 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.sasl.AuthorizeCallback;
import javax.security.sasl.SaslServer;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionGroup;
@ -49,6 +51,7 @@ import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
@ -1350,7 +1353,7 @@ public class ThriftServerRunner implements Runnable {
.setFamily(famAndQf[0])
.setQualifier(famAndQf[1])
.setTimestamp(put.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(m.value != null ? getBytes(m.value)
: HConstants.EMPTY_BYTE_ARRAY)
.build());
@ -1418,7 +1421,7 @@ public class ThriftServerRunner implements Runnable {
.setFamily(famAndQf[0])
.setQualifier(famAndQf[1])
.setTimestamp(put.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(m.value != null ? getBytes(m.value)
: HConstants.EMPTY_BYTE_ARRAY)
.build());
@ -1901,7 +1904,7 @@ public class ThriftServerRunner implements Runnable {
.setFamily(famAndQf[0])
.setQualifier(famAndQf[1])
.setTimestamp(put.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(mput.value != null ? getBytes(mput.value)
: HConstants.EMPTY_BYTE_ARRAY)
.build());


@ -28,7 +28,7 @@ import java.util.Map;
import org.apache.commons.collections4.MapUtils;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilder;
import org.apache.hadoop.hbase.Cell.DataType;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
@ -227,7 +227,7 @@ public class ThriftUtilities {
.setFamily(columnValue.getFamily())
.setQualifier(columnValue.getQualifier())
.setTimestamp(columnValue.getTimestamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(columnValue.getValue())
.build());
} else {
@ -236,7 +236,7 @@ public class ThriftUtilities {
.setFamily(columnValue.getFamily())
.setQualifier(columnValue.getQualifier())
.setTimestamp(out.getTimeStamp())
.setType(CellBuilder.DataType.Put)
.setType(DataType.Put)
.setValue(columnValue.getValue())
.build());
}