diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java index df2102a712f..96899d011ba 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java @@ -37,6 +37,7 @@ import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Consistency; @@ -72,7 +73,6 @@ import org.apache.hadoop.hbase.util.PairOfSameType; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; - import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -1361,7 +1361,7 @@ public class MetaTableAccessor { .setFamily(HConstants.REPLICATION_BARRIER_FAMILY) .setQualifier(seqBytes) .setTimestamp(put.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(seqBytes) .build()) .add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) @@ -1369,7 +1369,7 @@ public class MetaTableAccessor { .setFamily(HConstants.REPLICATION_META_FAMILY) .setQualifier(tableNameCq) .setTimestamp(put.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(tableName) .build()); return put; @@ -1383,7 +1383,7 @@ public class MetaTableAccessor { .setFamily(HConstants.REPLICATION_META_FAMILY) .setQualifier(daughterNameCq) .setTimestamp(put.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(value) .build()); return put; @@ -1396,7 +1396,7 @@ public class MetaTableAccessor { 
.setFamily(HConstants.REPLICATION_META_FAMILY) .setQualifier(parentNameCq) .setTimestamp(put.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(value) .build()); return put; @@ -1413,7 +1413,7 @@ public class MetaTableAccessor { .setFamily(HConstants.CATALOG_FAMILY) .setQualifier(HConstants.SPLITA_QUALIFIER) .setTimestamp(put.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(RegionInfo.toByteArray(splitA)) .build()); } @@ -1423,7 +1423,7 @@ public class MetaTableAccessor { .setFamily(HConstants.CATALOG_FAMILY) .setQualifier(HConstants.SPLITB_QUALIFIER) .setTimestamp(put.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(RegionInfo.toByteArray(splitB)) .build()); } @@ -1732,7 +1732,7 @@ public class MetaTableAccessor { .setFamily(HConstants.CATALOG_FAMILY) .setQualifier(HConstants.MERGEA_QUALIFIER) .setTimestamp(putOfMerged.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(RegionInfo.toByteArray(regionA)) .build()) .add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) @@ -1740,7 +1740,7 @@ public class MetaTableAccessor { .setFamily(HConstants.CATALOG_FAMILY) .setQualifier(HConstants.MERGEB_QUALIFIER) .setTimestamp(putOfMerged.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(RegionInfo.toByteArray(regionB)) .build()); @@ -1985,7 +1985,7 @@ public class MetaTableAccessor { .setFamily(HConstants.REPLICATION_POSITION_FAMILY) .setQualifier(Bytes.toBytes(peerId)) .setTimestamp(put.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(Bytes.toBytes(Math.abs(entry.getValue()))) .build()); puts.add(put); @@ -2153,7 +2153,7 @@ public class MetaTableAccessor { .setFamily(getCatalogFamily()) .setQualifier(HConstants.REGIONINFO_QUALIFIER) .setTimestamp(p.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) 
.setValue(RegionInfo.toByteArray(hri)) .build()); return p; @@ -2170,7 +2170,7 @@ public class MetaTableAccessor { .setFamily(getCatalogFamily()) .setQualifier(getServerColumn(replicaId)) .setTimestamp(time) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(Bytes.toBytes(sn.getAddress().toString())) .build()) .add(builder.clear() @@ -2178,7 +2178,7 @@ public class MetaTableAccessor { .setFamily(getCatalogFamily()) .setQualifier(getStartCodeColumn(replicaId)) .setTimestamp(time) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(Bytes.toBytes(sn.getStartcode())) .build()) .add(builder.clear() @@ -2186,7 +2186,7 @@ public class MetaTableAccessor { .setFamily(getCatalogFamily()) .setQualifier(getSeqNumColumn(replicaId)) .setTimestamp(time) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(Bytes.toBytes(openSeqNum)) .build()); } @@ -2199,21 +2199,21 @@ public class MetaTableAccessor { .setFamily(getCatalogFamily()) .setQualifier(getServerColumn(replicaId)) .setTimestamp(now) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .build()) .add(builder.clear() .setRow(p.getRow()) .setFamily(getCatalogFamily()) .setQualifier(getStartCodeColumn(replicaId)) .setTimestamp(now) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .build()) .add(builder.clear() .setRow(p.getRow()) .setFamily(getCatalogFamily()) .setQualifier(getSeqNumColumn(replicaId)) .setTimestamp(now) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .build()); } @@ -2241,7 +2241,7 @@ public class MetaTableAccessor { .setFamily(HConstants.CATALOG_FAMILY) .setQualifier(getSeqNumColumn(replicaId)) .setTimestamp(time) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(Bytes.toBytes(openSeqNum)) .build()); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java index 
606728eb8ac..644d1e8bd2f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java @@ -194,6 +194,11 @@ public class KeyOnlyFilter extends FilterBase { return cell.getTypeByte(); } + @Override + public DataType getType() { + return cell.getType(); + } + @Override public long getSequenceId() { return 0; @@ -307,6 +312,11 @@ public class KeyOnlyFilter extends FilterBase { return 0; } + @Override + public DataType getType() { + return cell.getType(); + } + @Override public byte[] getValueArray() { if (lenAsVal) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index db8b1a75767..f07278142f9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -27,6 +27,7 @@ import com.google.protobuf.RpcController; import com.google.protobuf.Service; import com.google.protobuf.ServiceException; import com.google.protobuf.TextFormat; + import java.io.IOException; import java.lang.reflect.Constructor; import java.lang.reflect.Method; @@ -37,9 +38,10 @@ import java.util.Map; import java.util.Map.Entry; import java.util.NavigableSet; import java.util.function.Function; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellBuilder; +import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; @@ -535,7 +537,7 @@ public final class ProtobufUtil { .setFamily(family) .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null) .setTimestamp(ts) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(qv.hasValue() ? 
qv.getValue().toByteArray() : null) .setTags(allTagsBytes) .build()); @@ -555,7 +557,7 @@ public final class ProtobufUtil { .setFamily(family) .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null) .setTimestamp(ts) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(qv.hasValue() ? qv.getValue().toByteArray() : null) .build()); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java index d9c699b31c5..c9ea5a5b085 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java @@ -38,13 +38,14 @@ import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.regex.Pattern; import java.util.stream.Collectors; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.CacheEvictionStats; import org.apache.hadoop.hbase.CacheEvictionStatsBuilder; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellBuilder; +import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; @@ -676,7 +677,7 @@ public final class ProtobufUtil { .setFamily(family) .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null) .setTimestamp(ts) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(qv.hasValue() ? qv.getValue().toByteArray() : null) .setTags(allTagsBytes) .build()); @@ -696,7 +697,7 @@ public final class ProtobufUtil { .setFamily(family) .setQualifier(qv.hasQualifier() ? 
qv.getQualifier().toByteArray() : null) .setTimestamp(ts) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(qv.hasValue() ? qv.getValue().toByteArray() : null) .build()); } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPut.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPut.java index edc8a5a624f..0ae2dfae119 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPut.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestPut.java @@ -24,8 +24,9 @@ import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; + import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellBuilder; +import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.testclassification.ClientTests; @@ -87,7 +88,7 @@ public class TestPut { .setFamily(family) .setQualifier(qualifier0) .setTimestamp(put.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(value0) .build()) .add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) @@ -95,7 +96,7 @@ public class TestPut { .setFamily(family) .setQualifier(qualifier1) .setTimestamp(ts1) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(value1) .build()); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java index 0ec78ad5717..bfd1eb99858 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseRpcControllerImpl.java @@ -177,6 +177,12 @@ public class TestHBaseRpcControllerImpl { // unused return null; } + + @Override + public DataType getType() { + // unused + 
return null; + } }; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java index 3522e2d34c7..713314ef906 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyOnlyKeyValue.java @@ -147,6 +147,10 @@ public class ByteBufferKeyOnlyKeyValue extends ByteBufferCell { return ByteBufferUtils.toByte(this.buf, this.offset + this.length - 1); } + public DataType getType() { + return PrivateCellUtil.toDataType(getTypeByte()); + } + @Override public long getSequenceId() { return 0; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java index beadaf6f3a9..870d87228d0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java @@ -17,9 +17,15 @@ */ package org.apache.hadoop.hbase; +import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE; + import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; @@ -344,4 +350,31 @@ public class ByteBufferKeyValue extends ByteBufferCell implements ExtendedCell { hash = 31 * hash + cell.getTypeByte(); return hash; } + + @Override + public Optional getTag(byte type) { + int length = getTagsLength(); + int offset = getTagsPosition(); + int pos = offset; + int tagLen; + while (pos < offset + length) { + ByteBuffer tagsBuffer = getTagsByteBuffer(); + tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE); + if (ByteBufferUtils.toByte(tagsBuffer, pos + 
TAG_LENGTH_SIZE) == type) { + return Optional.ofNullable(new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE)); + } + pos += TAG_LENGTH_SIZE + tagLen; + } + return Optional.ofNullable(null); + } + + @Override + public List<Tag> getTags() { + List<Tag> tags = new ArrayList<>(); + Iterator<Tag> tagsItr = PrivateCellUtil.tagsIterator(this); + while (tagsItr.hasNext()) { + tags.add(tagsItr.next()); + } + return tags; + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java index 2b99823c2dc..40f0a1c3e23 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java @@ -133,8 +133,7 @@ public interface Cell { /** * @return The byte representation of the KeyValue.TYPE of this cell: one of Put, Delete, etc - * @deprecated since 2.0.0, use appropriate {@link CellUtil#isDelete} or - * {@link CellUtil#isPut(Cell)} methods instead. This will be removed in 3.0.0. + * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Use {@link #getType()}. */ @Deprecated byte getTypeByte(); @@ -148,7 +147,9 @@ public interface Cell { * {@link HConstants#KEEP_SEQID_PERIOD} days, but generally becomes irrelevant after the cell's * row is no longer involved in any operations that require strict consistency. * @return seqId (always > 0 if exists), or 0 if it no longer exists + * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. */ + @Deprecated long getSequenceId(); //7) Value @@ -173,12 +174,16 @@ public interface Cell { /** * Contiguous raw bytes representing tags that may start at any index in the containing array. * @return the tags byte array + * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are now internal. */ + @Deprecated byte[] getTagsArray(); /** * @return the first offset where the tags start in the Cell + * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are now internal.
*/ + @Deprecated int getTagsOffset(); /** @@ -190,6 +195,39 @@ public interface Cell { * less than Integer.MAX_VALUE. * * @return the total length of the tags in the Cell. + * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. Tags are now internal. */ + @Deprecated int getTagsLength(); + + /** + * Returns the type of cell in a human readable format using {@link DataType} + * @return The data type of this cell: one of Put, Delete, etc + */ + DataType getType(); + + /** + * The valid types for user to build the cell. Currently, this is a subset of {@link KeyValue.Type}. + */ + public enum DataType { + Put((byte) 4), + + Delete((byte) 8), + + DeleteFamilyVersion((byte) 10), + + DeleteColumn((byte) 12), + + DeleteFamily((byte) 14); + + private final byte code; + + DataType(final byte c) { + this.code = c; + } + + public byte getCode() { + return this.code; + } + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilder.java index aeff15ac1e7..e89ac3762a4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilder.java @@ -26,18 +26,6 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Public public interface CellBuilder { - /** - * The valid types for user to build the cell. - * Currently, This is subset of {@link KeyValue.Type}.
- */ - enum DataType { - Put, - Delete, - DeleteFamilyVersion, - DeleteColumn, - DeleteFamily - } - CellBuilder setRow(final byte[] row); CellBuilder setRow(final byte[] row, final int rOffset, final int rLength); @@ -49,7 +37,7 @@ public interface CellBuilder { CellBuilder setTimestamp(final long timestamp); - CellBuilder setType(final DataType type); + CellBuilder setType(final Cell.DataType type); CellBuilder setValue(final byte[] value); CellBuilder setValue(final byte[] value, final int vOffset, final int vLength); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index 03cf610f15a..c116f319dfd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -591,40 +591,31 @@ public final class CellUtil { /** * Note : Now only CPs can create cell with tags using the CP environment * @return A new cell which is having the extra tags also added to it. - * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. - * Use CP environment to build Cell using {@link ExtendedCellBuilder} + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. + * */ @Deprecated public static Cell createCell(Cell cell, List tags) { - return createCell(cell, Tag.fromList(tags)); + return PrivateCellUtil.createCell(cell, tags); } /** * Now only CPs can create cell with tags using the CP environment * @return A new cell which is having the extra tags also added to it. - * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. - * Use CP environment to build Cell using {@link ExtendedCellBuilder} + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. 
*/ @Deprecated public static Cell createCell(Cell cell, byte[] tags) { - if (cell instanceof ByteBufferCell) { - return new PrivateCellUtil.TagRewriteByteBufferCell((ByteBufferCell) cell, tags); - } - return new PrivateCellUtil.TagRewriteCell(cell, tags); + return PrivateCellUtil.createCell(cell, tags); } /** * Now only CPs can create cell with tags using the CP environment - * @deprecated As of HBase-2.0. Will be removed in HBase-3.0. - * Use CP environment to build Cell using {@link ExtendedCellBuilder} + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. */ @Deprecated public static Cell createCell(Cell cell, byte[] value, byte[] tags) { - if (cell instanceof ByteBufferCell) { - return new PrivateCellUtil.ValueAndTagRewriteByteBufferCell((ByteBufferCell) cell, value, - tags); - } - return new PrivateCellUtil.ValueAndTagRewriteCell(cell, value, tags); + return PrivateCellUtil.createCell(cell, value, tags); } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java index 81ca0189777..31df296e1d2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCell.java @@ -131,4 +131,50 @@ public interface ExtendedCell extends RawCell, HeapSize, Cloneable { * @param ts buffer containing the timestamp value */ void setTimestamp(byte[] ts) throws IOException; + + /** + * A region-specific unique monotonically increasing sequence ID given to each Cell. It always + * exists for cells in the memstore but is not retained forever. It will be kept for + * {@link HConstants#KEEP_SEQID_PERIOD} days, but generally becomes irrelevant after the cell's + * row is no longer involved in any operations that require strict consistency. 
+ * @return seqId (always > 0 if exists), or 0 if it no longer exists + */ + long getSequenceId(); + + /** + * Contiguous raw bytes representing tags that may start at any index in the containing array. + * @return the tags byte array + */ + byte[] getTagsArray(); + + /** + * @return the first offset where the tags start in the Cell + */ + int getTagsOffset(); + + /** + * HBase internally uses 2 bytes to store tags length in Cell. As the tags length is always a + * non-negative number, to make good use of the sign bit, the max of tags length is defined 2 * + * Short.MAX_VALUE + 1 = 65535. As a result, the return type is int, because a short is not + * capable of handling that. Please note that even if the return type is int, the max tags length + * is far less than Integer.MAX_VALUE. + * @return the total length of the tags in the Cell. + */ + int getTagsLength(); + + /** + * {@inheritDoc} + *

+ * Note : This does not expose the internal types of Cells like {@link KeyValue.Type#Maximum} and + * {@link KeyValue.Type#Minimum} + */ + @Override + default DataType getType() { + return PrivateCellUtil.toDataType(getTypeByte()); + } + + /** + * @return The byte representation of the KeyValue.TYPE of this cell: one of Put, Delete, etc + */ + byte getTypeByte(); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java index 57fa44e5f0b..b964d67a1e8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hbase; +import java.util.List; + import org.apache.yetus.audience.InterfaceAudience; /** @@ -26,8 +28,8 @@ import org.apache.yetus.audience.InterfaceAudience; * Use {@link ExtendedCellBuilderFactory} to get ExtendedCellBuilder instance. * TODO: ditto for ByteBufferCell? 
*/ -@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC) -public interface ExtendedCellBuilder extends CellBuilder { +@InterfaceAudience.Private +public interface ExtendedCellBuilder extends RawCellBuilder { @Override ExtendedCellBuilder setRow(final byte[] row); @Override @@ -47,7 +49,7 @@ public interface ExtendedCellBuilder extends CellBuilder { ExtendedCellBuilder setTimestamp(final long timestamp); @Override - ExtendedCellBuilder setType(final DataType type); + ExtendedCellBuilder setType(final Cell.DataType type); ExtendedCellBuilder setType(final byte type); @@ -62,11 +64,17 @@ public interface ExtendedCellBuilder extends CellBuilder { @Override ExtendedCellBuilder clear(); - // TODO : While creating RawCellBuilder allow 'Tag' to be passed instead of byte[] + // we have this method for performance reasons so that if one could create a cell directly from + // the tag byte[] of the cell without having to convert to a list of Tag(s) and again adding it + // back. ExtendedCellBuilder setTags(final byte[] tags); - // TODO : While creating RawCellBuilder allow 'Tag' to be passed instead of byte[] + // we have this method for performance reasons so that if one could create a cell directly from + // the tag byte[] of the cell without having to convert to a list of Tag(s) and again adding it + // back. ExtendedCellBuilder setTags(final byte[] tags, int tagsOffset, int tagsLength); + @Override + ExtendedCellBuilder setTags(List tags); /** * Internal usage. 
Be careful before you use this while building a cell * @param seqId set the seqId diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java index 38778fb96b0..f3acdf46f68 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java @@ -24,25 +24,17 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public final class ExtendedCellBuilderFactory { - public static ExtendedCellBuilder create(CellBuilderType type) { - return create(type, true); - } - /** * Allows creating a cell with the given CellBuilderType. * @param type the type of CellBuilder(DEEP_COPY or SHALLOW_COPY). - * @param allowSeqIdUpdate if seqId can be updated. CPs are not allowed to update - * the seqId * @return the cell that is created */ - public static ExtendedCellBuilder create(CellBuilderType type, boolean allowSeqIdUpdate) { + public static ExtendedCellBuilder create(CellBuilderType type) { switch (type) { case SHALLOW_COPY: - // CPs are not allowed to update seqID and they always use DEEP_COPY. 
So we have not - // passing 'allowSeqIdUpdate' to IndividualBytesFieldCellBuilder return new IndividualBytesFieldCellBuilder(); case DEEP_COPY: - return new KeyValueBuilder(allowSeqIdUpdate); + return new KeyValueBuilder(); default: throw new UnsupportedOperationException("The type:" + type + " is unsupported"); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java index 536dbdcb2c5..770b61d3277 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hbase; +import java.util.List; + import org.apache.commons.lang3.ArrayUtils; import org.apache.yetus.audience.InterfaceAudience; @@ -40,12 +42,6 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder { protected byte[] tags = null; protected int tagsOffset = 0; protected int tagsLength = 0; - // Will go away once we do with RawCellBuilder - protected boolean allowSeqIdUpdate = false; - - public ExtendedCellBuilderImpl(boolean allowSeqIdUpdate) { - this.allowSeqIdUpdate = allowSeqIdUpdate; - } @Override public ExtendedCellBuilder setRow(final byte[] row) { @@ -93,8 +89,8 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder { } @Override - public ExtendedCellBuilder setType(final DataType type) { - this.type = toKeyValueType(type); + public ExtendedCellBuilder setType(final Cell.DataType type) { + this.type = PrivateCellUtil.toTypeByte(type); return this; } @@ -130,13 +126,16 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder { return this; } + @Override + public ExtendedCellBuilder setTags(List tags) { + byte[] tagBytes = TagUtil.fromList(tags); + return setTags(tagBytes); + } + @Override public ExtendedCellBuilder setSequenceId(final long seqId) { - if 
(allowSeqIdUpdate) { - this.seqId = seqId; - return this; - } - throw new UnsupportedOperationException("SeqId cannot be set on this cell"); + this.seqId = seqId; + return this; } private void checkBeforeBuild() { @@ -175,15 +174,4 @@ public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder { tagsLength = 0; return this; } - - private static KeyValue.Type toKeyValueType(DataType type) { - switch (type) { - case Put: return KeyValue.Type.Put; - case Delete: return KeyValue.Type.Delete; - case DeleteColumn: return KeyValue.Type.DeleteColumn; - case DeleteFamilyVersion: return KeyValue.Type.DeleteFamilyVersion; - case DeleteFamily: return KeyValue.Type.DeleteFamily; - default: throw new UnsupportedOperationException("Unsupported data type:" + type); - } - } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java index 7093b4b2dcf..a25bd195851 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java @@ -18,6 +18,13 @@ package org.apache.hadoop.hbase; +import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; + import org.apache.commons.lang3.ArrayUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; @@ -297,4 +304,30 @@ public class IndividualBytesFieldCell implements ExtendedCell { public String toString() { return CellUtil.toString(this, true); } + + @Override + public Optional getTag(byte type) { + int length = getTagsLength(); + int offset = getTagsOffset(); + int pos = offset; + while (pos < offset + length) { + int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE); + if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) { + return Optional 
+ .ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE)); + } + pos += TAG_LENGTH_SIZE + tagLen; + } + return Optional.ofNullable(null); + } + + @Override + public List getTags() { + List tags = new ArrayList<>(); + Iterator tagsItr = PrivateCellUtil.tagsIterator(this); + while (tagsItr.hasNext()) { + tags.add(tagsItr.next()); + } + return tags; + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCellBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCellBuilder.java index 62febf85c95..8a0168e1ed2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCellBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCellBuilder.java @@ -22,14 +22,6 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private class IndividualBytesFieldCellBuilder extends ExtendedCellBuilderImpl { - public IndividualBytesFieldCellBuilder() { - this(true); - } - - public IndividualBytesFieldCellBuilder(boolean allowSeqIdUpdate) { - super(allowSeqIdUpdate); - } - @Override public ExtendedCell innerBuild() { return new IndividualBytesFieldCell(row, rOffset, rLength, diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index 7093650af82..88e7d88c4d3 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -19,6 +19,7 @@ */ package org.apache.hadoop.hbase; +import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE; import static org.apache.hadoop.hbase.util.Bytes.len; import java.io.DataInput; @@ -29,8 +30,10 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Optional; import 
org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -1521,19 +1524,6 @@ public class KeyValue implements ExtendedCell { return tagsLen; } - /** - * Returns any tags embedded in the KeyValue. Used in testcases. - * @return The tags - */ - @Override - public List getTags() { - int tagsLength = getTagsLength(); - if (tagsLength == 0) { - return EMPTY_ARRAY_LIST; - } - return TagUtil.asList(getTagsArray(), getTagsOffset(), tagsLength); - } - /** * @return the backing array of the entire KeyValue (all KeyValue fields are in a single array) */ @@ -2564,4 +2554,30 @@ public class KeyValue implements ExtendedCell { kv.setSequenceId(this.getSequenceId()); return kv; } + + @Override + public Optional getTag(byte type) { + int length = getTagsLength(); + int offset = getTagsOffset(); + int pos = offset; + while (pos < offset + length) { + int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE); + if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) { + return Optional + .ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE)); + } + pos += TAG_LENGTH_SIZE + tagLen; + } + return Optional.ofNullable(null); + } + + @Override + public List getTags() { + List tags = new ArrayList<>(); + Iterator tagsItr = PrivateCellUtil.tagsIterator(this); + while (tagsItr.hasNext()) { + tags.add(tagsItr.next()); + } + return tags; + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java index 4f019924739..9480b71e049 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java @@ -22,14 +22,6 @@ import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private class KeyValueBuilder extends ExtendedCellBuilderImpl { - KeyValueBuilder() { - this(true); - } - - KeyValueBuilder(boolean allowSeqIdUpdate) { - 
super(allowSeqIdUpdate); - } - @Override protected ExtendedCell innerBuild() { KeyValue kv = new KeyValue(row, rOffset, rLength, diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java index df080f3630c..e52ed841071 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java @@ -21,6 +21,7 @@ import static org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY; import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE; import com.google.common.annotations.VisibleForTesting; + import java.io.DataOutput; import java.io.DataOutputStream; import java.io.IOException; @@ -31,6 +32,7 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Optional; + import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hbase.io.HeapSize; @@ -43,6 +45,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.yetus.audience.InterfaceAudience; + /** * Utility methods helpful slinging {@link Cell} instances. It has more powerful and * rich set of APIs than those in {@link CellUtil} for internal usage. @@ -107,7 +110,7 @@ public final class PrivateCellUtil { * @return A new cell which is having the extra tags also added to it. 
*/ public static Cell createCell(Cell cell, List tags) { - return createCell(cell, Tag.fromList(tags)); + return createCell(cell, TagUtil.fromList(tags)); } /** @@ -311,6 +314,32 @@ public final class PrivateCellUtil { Cell clonedBaseCell = ((ExtendedCell) this.cell).deepClone(); return new TagRewriteCell(clonedBaseCell, this.tags); } + + @Override + public Optional getTag(byte type) { + int length = getTagsLength(); + int offset = getTagsOffset(); + int pos = offset; + while (pos < offset + length) { + int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE); + if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) { + return Optional + .ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE)); + } + pos += TAG_LENGTH_SIZE + tagLen; + } + return Optional.ofNullable(null); + } + + @Override + public List getTags() { + List tags = new ArrayList<>(); + Iterator tagsItr = PrivateCellUtil.tagsIterator(this); + while (tagsItr.hasNext()) { + tags.add(tagsItr.next()); + } + return tags; + } } static class TagRewriteByteBufferCell extends ByteBufferCell implements ExtendedCell { @@ -544,6 +573,33 @@ public final class PrivateCellUtil { public int getTagsPosition() { return 0; } + + @Override + public Optional getTag(byte type) { + int length = getTagsLength(); + int offset = getTagsPosition(); + int pos = offset; + int tagLen; + while (pos < offset + length) { + ByteBuffer tagsBuffer = getTagsByteBuffer(); + tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE); + if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) { + return Optional.ofNullable(new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE)); + } + pos += TAG_LENGTH_SIZE + tagLen; + } + return Optional.ofNullable(null); + } + + @Override + public List getTags() { + List tags = new ArrayList<>(); + Iterator tagsItr = PrivateCellUtil.tagsIterator(this); + while (tagsItr.hasNext()) { + tags.add(tagsItr.next()); + } + return tags; + } } static class 
ValueAndTagRewriteCell extends TagRewriteCell { @@ -928,7 +984,7 @@ public final class PrivateCellUtil { return CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength()); } - private static Iterator tagsIterator(final ByteBuffer tags, final int offset, + public static Iterator tagsIterator(final ByteBuffer tags, final int offset, final int length) { return new Iterator() { private int pos = offset; @@ -1231,6 +1287,29 @@ public final class PrivateCellUtil { cell.getQualifierLength()); } + public static Cell.DataType toDataType(byte type) { + Type codeToType = KeyValue.Type.codeToType(type); + switch (codeToType) { + case Put: return Cell.DataType.Put; + case Delete: return Cell.DataType.Delete; + case DeleteColumn: return Cell.DataType.DeleteColumn; + case DeleteFamily: return Cell.DataType.DeleteFamily; + case DeleteFamilyVersion: return Cell.DataType.DeleteFamilyVersion; + default: throw new UnsupportedOperationException("Invalid type of cell "+type); + } + } + + public static KeyValue.Type toTypeByte(Cell.DataType type) { + switch (type) { + case Put: return KeyValue.Type.Put; + case Delete: return KeyValue.Type.Delete; + case DeleteColumn: return KeyValue.Type.DeleteColumn; + case DeleteFamilyVersion: return KeyValue.Type.DeleteFamilyVersion; + case DeleteFamily: return KeyValue.Type.DeleteFamily; + default: throw new UnsupportedOperationException("Unsupported data type:" + type); + } + } + /** * Compare cell's value against given comparator * @param cell @@ -1345,6 +1424,32 @@ public final class PrivateCellUtil { public int getTagsLength() { return 0; } + + @Override + public Optional getTag(byte type) { + int length = getTagsLength(); + int offset = getTagsOffset(); + int pos = offset; + while (pos < offset + length) { + int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE); + if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) { + return Optional + .ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + 
TAG_LENGTH_SIZE)); + } + pos += TAG_LENGTH_SIZE + tagLen; + } + return Optional.ofNullable(null); + } + + @Override + public List getTags() { + List tags = new ArrayList<>(); + Iterator tagsItr = PrivateCellUtil.tagsIterator(this); + while (tagsItr.hasNext()) { + tags.add(tagsItr.next()); + } + return tags; + } } /** @@ -1498,6 +1603,33 @@ public final class PrivateCellUtil { public int getValuePosition() { return 0; } + + @Override + public Optional getTag(byte type) { + int length = getTagsLength(); + int offset = getTagsPosition(); + int pos = offset; + int tagLen; + while (pos < offset + length) { + ByteBuffer tagsBuffer = getTagsByteBuffer(); + tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, TAG_LENGTH_SIZE); + if (ByteBufferUtils.toByte(tagsBuffer, pos + TAG_LENGTH_SIZE) == type) { + return Optional.ofNullable(new ByteBufferTag(tagsBuffer, pos, tagLen + TAG_LENGTH_SIZE)); + } + pos += TAG_LENGTH_SIZE + tagLen; + } + return Optional.ofNullable(null); + } + + @Override + public List getTags() { + List tags = new ArrayList<>(); + Iterator tagsItr = PrivateCellUtil.tagsIterator(this); + while (tagsItr.hasNext()) { + tags.add(tagsItr.next()); + } + return tags; + } } private static class FirstOnRowCell extends EmptyCell { @@ -1547,6 +1679,11 @@ public final class PrivateCellUtil { public byte getTypeByte() { return Type.Maximum.getCode(); } + + @Override + public DataType getType() { + throw new UnsupportedOperationException(); + } } private static class FirstOnRowByteBufferCell extends EmptyByteBufferCell { @@ -1597,6 +1734,11 @@ public final class PrivateCellUtil { public byte getTypeByte() { return Type.Maximum.getCode(); } + + @Override + public DataType getType() { + throw new UnsupportedOperationException(); + } } private static class LastOnRowByteBufferCell extends EmptyByteBufferCell { @@ -1647,6 +1789,11 @@ public final class PrivateCellUtil { public byte getTypeByte() { return Type.Minimum.getCode(); } + + @Override + public DataType getType() { + 
throw new UnsupportedOperationException(); + } } private static class FirstOnRowColByteBufferCell extends FirstOnRowByteBufferCell { @@ -1875,6 +2022,11 @@ public final class PrivateCellUtil { public byte getTypeByte() { return Type.Minimum.getCode(); } + + @Override + public DataType getType() { + throw new UnsupportedOperationException(); + } } private static class LastOnRowColCell extends LastOnRowCell { @@ -2060,6 +2212,11 @@ public final class PrivateCellUtil { public byte getTypeByte() { return Type.DeleteFamily.getCode(); } + + @Override + public DataType getType() { + return DataType.DeleteFamily; + } } /** @@ -2890,5 +3047,4 @@ public final class PrivateCellUtil { public static Cell createFirstDeleteFamilyCellOnRow(final byte[] row, final byte[] fam) { return new FirstOnRowDeleteFamilyCell(row, fam); } - } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/RawCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/RawCell.java index 9e25a9a5f14..4cda7d59fed 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/RawCell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/RawCell.java @@ -41,19 +41,14 @@ public interface RawCell extends Cell { * Creates a list of tags in the current cell * @return a list of tags */ - default List getTags() { - return PrivateCellUtil.getTags(this); - } + List getTags(); /** * Returns the specific tag of the given type * @param type the type of the tag * @return the specific tag if available or null */ - // TODO : Move to individual cell impl - default Optional getTag(byte type) { - return PrivateCellUtil.getTag(this, type); - } + Optional getTag(byte type); /** * Check the length of tags. 
If it is invalid, throw IllegalArgumentException diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java index 8709814dfb7..6f9bfdc226b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Tag.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase; import java.nio.ByteBuffer; -import java.util.List; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; @@ -152,38 +151,6 @@ public interface Tag { } } - /** - * Write a list of tags into a byte array - * @param tags The list of tags - * @return the serialized tag data as bytes - */ - // TODO : Remove this when we move to RawCellBuilder - public static byte[] fromList(List tags) { - if (tags == null || tags.isEmpty()) { - return HConstants.EMPTY_BYTE_ARRAY; - } - int length = 0; - for (Tag tag : tags) { - length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE; - } - byte[] b = new byte[length]; - int pos = 0; - int tlen; - for (Tag tag : tags) { - tlen = tag.getValueLength(); - pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE); - pos = Bytes.putByte(b, pos, tag.getType()); - if (tag.hasArray()) { - pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen); - } else { - ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(), - pos, tlen); - pos += tlen; - } - } - return b; - } - /** * Converts the value bytes of the given tag into a long value * @param tag The Tag diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java index 6ad66baf1ad..34c78a531b7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TagUtil.java @@ -136,6 +136,41 @@ public final class TagUtil { return tags; } + /** + * Write a 
list of tags into a byte array + * Note : these are all purely internal APIs. It helps in + * cases where we have set of tags and we would want to create a cell out of it. Say in Mobs we + * create a reference tags to indicate the presence of mob data. Also note that these are not + * exposed to CPs also + * @param tags The list of tags + * @return the serialized tag data as bytes + */ + public static byte[] fromList(List tags) { + if (tags == null || tags.isEmpty()) { + return HConstants.EMPTY_BYTE_ARRAY; + } + int length = 0; + for (Tag tag : tags) { + length += tag.getValueLength() + Tag.INFRASTRUCTURE_SIZE; + } + byte[] b = new byte[length]; + int pos = 0; + int tlen; + for (Tag tag : tags) { + tlen = tag.getValueLength(); + pos = Bytes.putAsShort(b, pos, tlen + Tag.TYPE_LENGTH_SIZE); + pos = Bytes.putByte(b, pos, tag.getType()); + if (tag.hasArray()) { + pos = Bytes.putBytes(b, pos, tag.getValueArray(), tag.getValueOffset(), tlen); + } else { + ByteBufferUtils.copyFromBufferToArray(b, tag.getValueByteBuffer(), tag.getValueOffset(), + pos, tlen); + pos += tlen; + } + } + return b; + } + /** * Iterator returned when no Tags. Used by CellUtil too. 
*/ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java index 9bcda017e5c..f4d3c40d223 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java @@ -21,8 +21,14 @@ import java.io.DataOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.ByteBufferCell; +import org.apache.hadoop.hbase.ByteBufferTag; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; @@ -32,6 +38,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.PrivateCellUtil; +import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.io.TagCompressionContext; import org.apache.hadoop.hbase.io.util.LRUDictionary; import org.apache.hadoop.hbase.io.util.StreamUtils; @@ -475,6 +482,32 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder { // This is not used in actual flow. 
Throwing UnsupportedOperationException throw new UnsupportedOperationException(); } + + @Override + public Optional getTag(byte type) { + int length = getTagsLength(); + int offset = getTagsOffset(); + int pos = offset; + while (pos < offset + length) { + int tagLen = Bytes.readAsInt(getTagsArray(), pos, Tag.TAG_LENGTH_SIZE); + if (getTagsArray()[pos + Tag.TAG_LENGTH_SIZE] == type) { + return Optional + .ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + Tag.TAG_LENGTH_SIZE)); + } + pos += Tag.TAG_LENGTH_SIZE + tagLen; + } + return Optional.ofNullable(null); + } + + @Override + public List getTags() { + List tags = new ArrayList<>(); + Iterator tagsItr = PrivateCellUtil.tagsIterator(this); + while (tagsItr.hasNext()) { + tags.add(tagsItr.next()); + } + return tags; + } } protected static class OffheapDecodedCell extends ByteBufferCell implements ExtendedCell { @@ -720,6 +753,35 @@ abstract class BufferedDataBlockEncoder extends AbstractDataBlockEncoder { // This is not used in actual flow. 
Throwing UnsupportedOperationException throw new UnsupportedOperationException(); } + + @Override + public Optional getTag(byte type) { + int length = getTagsLength(); + int offset = getTagsPosition(); + int pos = offset; + int tagLen; + while (pos < offset + length) { + ByteBuffer tagsBuffer = getTagsByteBuffer(); + tagLen = ByteBufferUtils.readAsInt(tagsBuffer, pos, Tag.TAG_LENGTH_SIZE); + if (ByteBufferUtils.toByte(tagsBuffer, pos + Tag.TAG_LENGTH_SIZE) == type) { + return Optional + .ofNullable(new ByteBufferTag(tagsBuffer, pos, tagLen + Tag.TAG_LENGTH_SIZE)); + } + pos += Tag.TAG_LENGTH_SIZE + tagLen; + } + return Optional.ofNullable(null); + } + + @Override + public List getTags() { + List tags = new ArrayList<>(); + Iterator tagsItr = PrivateCellUtil.tagsIterator(this); + while (tagsItr.hasNext()) { + tags.add(tagsItr.next()); + } + return tags; + } + } protected abstract static class BufferedEncodedSeeker diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java index ad185470f20..5c6c65af98d 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java @@ -41,7 +41,7 @@ public class TestCellBuilder { .setRow(row) .setFamily(family) .setQualifier(qualifier) - .setType(CellBuilder.DataType.Put) + .setType(Cell.DataType.Put) .setValue(value) .build(); row[0] = NEW_DATA; @@ -64,7 +64,7 @@ public class TestCellBuilder { .setRow(row) .setFamily(family) .setQualifier(qualifier) - .setType(CellBuilder.DataType.Put) + .setType(Cell.DataType.Put) .setValue(value) .build(); row[0] = NEW_DATA; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java index 0395c0906ee..4ab6bce85cf 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java +++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java @@ -198,6 +198,11 @@ public class TestCellUtil { // TODO Auto-generated method stub return 0; } + + @Override + public DataType getType() { + return PrivateCellUtil.toDataType(getTypeByte()); + } }; /** @@ -613,5 +618,10 @@ public class TestCellUtil { public int getTagsLength() { return this.kv.getTagsLength(); } + + @Override + public DataType getType() { + return PrivateCellUtil.toDataType(getTypeByte()); + } } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java index 86891ae77fe..c6b726527b3 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java @@ -737,5 +737,10 @@ public class TestKeyValue extends TestCase { public byte[] getTagsArray() { return this.kv.getTagsArray(); } + + @Override + public DataType getType() { + return PrivateCellUtil.toDataType(getTypeByte()); + } } } diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java index 5d95fde5113..e460316515b 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java @@ -19,10 +19,22 @@ package org.apache.hadoop.hbase.client.example; import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.Future; +import 
java.util.concurrent.ThreadFactory; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.hbase.CellBuilder; +import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.TableName; @@ -39,18 +51,6 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.ForkJoinPool; -import java.util.concurrent.Future; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.ThreadLocalRandom; -import java.util.concurrent.TimeUnit; - /** * Example on how to use HBase's {@link Connection} and {@link Table} in a @@ -226,7 +226,7 @@ public class MultiThreadedClientExample extends Configured implements Tool { .setFamily(FAMILY) .setQualifier(QUAL) .setTimestamp(p.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(value) .build()); puts.add(p); @@ -263,7 +263,7 @@ public class MultiThreadedClientExample extends Configured implements Tool { .setFamily(FAMILY) .setQualifier(QUAL) .setTimestamp(p.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(value) .build()); t.put(p); diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java index 863ea8947f7..cf7796b16cc 100644 --- 
a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/ValueRewritingObserver.java @@ -22,7 +22,6 @@ import java.util.Optional; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilder; -import org.apache.hadoop.hbase.CellBuilder.DataType; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; @@ -89,7 +88,7 @@ public class ValueRewritingObserver implements RegionObserver, RegionCoprocessor cellBuilder.setFamily(CellUtil.cloneFamily(c)); cellBuilder.setQualifier(CellUtil.cloneQualifier(c)); cellBuilder.setTimestamp(c.getTimestamp()); - cellBuilder.setType(DataType.Put); + cellBuilder.setType(Cell.DataType.Put); // Make sure each cell gets a unique value byte[] clonedValue = new byte[replacedValue.length]; System.arraycopy(replacedValue, 0, clonedValue, 0, replacedValue.length); diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java index 55d9ac324d1..63637b50aa8 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.java @@ -29,7 +29,6 @@ import java.util.stream.IntStream; import org.apache.commons.lang3.mutable.MutableLong; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellBuilder; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; @@ -80,7 +79,7 @@ public class WriteHeavyIncrementObserver implements RegionCoprocessor, RegionObs private Cell createCell(byte[] row, byte[] family, byte[] 
qualifier, long ts, long value) { return CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(row) - .setType(CellBuilder.DataType.Put).setFamily(family).setQualifier(qualifier) + .setType(Cell.DataType.Put).setFamily(family).setQualifier(qualifier) .setTimestamp(ts).setValue(Bytes.toBytes(value)).build(); } @@ -250,7 +249,7 @@ public class WriteHeavyIncrementObserver implements RegionCoprocessor, RegionObs .setQualifier(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()) .setValue(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()) - .setType(CellBuilder.DataType.Put).setTimestamp(ts).build()); + .setType(Cell.DataType.Put).setTimestamp(ts).build()); } } c.getEnvironment().getRegion().put(put); diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java index 7f94f9328cd..77c9e22d530 100644 --- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java +++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java @@ -46,7 +46,7 @@ public class TestPBCell { @Test public void testRoundTrip() { final Cell cell = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"), - Bytes.toBytes("qual"), Bytes.toBytes("val")); + Bytes.toBytes("qual"), Bytes.toBytes("val")); CellProtos.Cell c = ProtobufUtil.toCell(cell), decoded; PositionedByteRange pbr = new SimplePositionedByteRange(c.getSerializedSize()); pbr.setPosition(0); @@ -54,6 +54,7 @@ public class TestPBCell { pbr.setPosition(0); decoded = CODEC.decode(pbr); assertEquals(encodedLength, pbr.getPosition()); - assertTrue(CellUtil.equals(cell, ProtobufUtil.toCell(ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), decoded))); + assertTrue(CellUtil.equals(cell, ProtobufUtil + .toCell(ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), decoded))); } } diff --git 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/util/MapReduceCell.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/util/MapReduceCell.java index 38ff59b4093..ae47e7a6242 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/util/MapReduceCell.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/util/MapReduceCell.java @@ -17,15 +17,23 @@ */ package org.apache.hadoop.hbase.util; +import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE; + import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.PrivateCellUtil; +import org.apache.hadoop.hbase.Tag; import org.apache.yetus.audience.InterfaceAudience; /** @@ -268,4 +276,30 @@ public class MapReduceCell extends ByteBufferCell implements ExtendedCell { throw new RuntimeException(e); } } + + @Override + public Optional getTag(byte type) { + int length = getTagsLength(); + int offset = getTagsOffset(); + int pos = offset; + while (pos < offset + length) { + int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE); + if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) { + return Optional + .ofNullable(new ArrayBackedTag(getTagsArray(), pos, tagLen + TAG_LENGTH_SIZE)); + } + pos += TAG_LENGTH_SIZE + tagLen; + } + return Optional.ofNullable(null); + } + + @Override + public List getTags() { + List tags = new ArrayList<>(); + Iterator tagsItr = PrivateCellUtil.tagsIterator(this); + while (tagsItr.hasNext()) { + tags.add(tagsItr.next()); + } + return tags; + } } diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java index dead804ca27..8c1cb5b2cfe 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java @@ -39,11 +39,10 @@ import javax.ws.rs.core.UriInfo; import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hbase.CellBuilder; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; -import org.apache.yetus.audience.InterfaceAudience; -import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.Append; @@ -56,6 +55,7 @@ import org.apache.hadoop.hbase.rest.model.CellModel; import org.apache.hadoop.hbase.rest.model.CellSetModel; import org.apache.hadoop.hbase.rest.model.RowModel; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.yetus.audience.InterfaceAudience; @InterfaceAudience.Private public class RowResource extends ResourceBase { @@ -246,7 +246,7 @@ public class RowResource extends ResourceBase { .setFamily(parts[0]) .setQualifier(parts[1]) .setTimestamp(cell.getTimestamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(cell.getValue()) .build()); } @@ -321,7 +321,7 @@ public class RowResource extends ResourceBase { .setFamily(parts[0]) .setQualifier(parts[1]) .setTimestamp(timestamp) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(message) .build()); table = servlet.getTable(tableResource.getName()); @@ -518,7 +518,7 @@ public class RowResource extends ResourceBase { .setFamily(parts[0]) .setQualifier(parts[1]) .setTimestamp(cell.getTimestamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) 
.setValue(cell.getValue()) .build()); if(Bytes.equals(col, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java index 3380639345d..84e6d25e769 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java @@ -24,8 +24,8 @@ import java.util.concurrent.ConcurrentMap; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CoprocessorEnvironment; -import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.HBaseInterfaceAudience; +import org.apache.hadoop.hbase.RawCellBuilder; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.RegionInfo; @@ -127,7 +127,7 @@ public interface RegionCoprocessorEnvironment extends CoprocessorEnvironment getLastKey() { if (splitPoint != null) { return Optional.of(CellBuilderFactory.create(CellBuilderType.DEEP_COPY) - .setType(CellBuilder.DataType.Put) + .setType(Cell.DataType.Put) .setRow(Arrays.copyOf(splitPoint, splitPoint.length + 1)).build()); } else { return Optional.empty(); @@ -203,7 +201,7 @@ public class MockHStoreFile extends HStoreFile { public Optional midKey() throws IOException { if (splitPoint != null) { return Optional.of(CellBuilderFactory.create(CellBuilderType.DEEP_COPY) - .setType(CellBuilder.DataType.Put).setRow(splitPoint).build()); + .setType(Cell.DataType.Put).setRow(splitPoint).build()); } else { return Optional.empty(); } @@ -213,7 +211,7 @@ public class MockHStoreFile extends HStoreFile { public Optional getFirstKey() { if (splitPoint != null) { return Optional.of(CellBuilderFactory.create(CellBuilderType.DEEP_COPY) - .setType(CellBuilder.DataType.Put).setRow(splitPoint, 0, 
splitPoint.length - 1) + .setType(Cell.DataType.Put).setRow(splitPoint, 0, splitPoint.length - 1) .build()); } else { return Optional.empty(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionLifeCycleTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionLifeCycleTracker.java index f7b78981bff..40e941deaad 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionLifeCycleTracker.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionLifeCycleTracker.java @@ -28,7 +28,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Optional; -import org.apache.hadoop.hbase.CellBuilder; +import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -154,7 +154,7 @@ public class TestCompactionLifeCycleTracker { .setFamily(CF1) .setQualifier(QUALIFIER) .setTimestamp(HConstants.LATEST_TIMESTAMP) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(Bytes.toBytes(i)) .build())); } @@ -167,7 +167,7 @@ public class TestCompactionLifeCycleTracker { .setFamily(CF1) .setQualifier(QUALIFIER) .setTimestamp(HConstants.LATEST_TIMESTAMP) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(Bytes.toBytes(i)) .build())); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFlushLifeCycleTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFlushLifeCycleTracker.java index 80bd906b68e..fc8fe4105bf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFlushLifeCycleTracker.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestFlushLifeCycleTracker.java @@ -28,7 +28,7 @@ import java.io.InterruptedIOException; import java.util.Optional; import 
java.util.concurrent.CountDownLatch; -import org.apache.hadoop.hbase.CellBuilder; +import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.HBaseTestingUtility; @@ -200,7 +200,7 @@ public class TestFlushLifeCycleTracker { .setFamily(CF) .setQualifier(QUALIFIER) .setTimestamp(HConstants.LATEST_TIMESTAMP) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(Bytes.toBytes(i)) .build())); } @@ -234,7 +234,7 @@ public class TestFlushLifeCycleTracker { .setFamily(CF) .setQualifier(QUALIFIER) .setTimestamp(HConstants.LATEST_TIMESTAMP) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(Bytes.toBytes(i)) .build())); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index acbdf7d737c..3482955b2ae 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -29,9 +29,9 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyBoolean; -import static org.mockito.Matchers.anyLong; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -74,7 +74,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.CategoryBasedTimeout; import org.apache.hadoop.hbase.Cell; -import 
org.apache.hadoop.hbase.CellBuilder; +import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; @@ -89,7 +89,6 @@ import org.apache.hadoop.hbase.HConstants.OperationStatusCode; import org.apache.hadoop.hbase.HDFSBlocksDistribution; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.MiniHBaseCluster; @@ -97,6 +96,7 @@ import org.apache.hadoop.hbase.MultithreadedTestUtil; import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread; import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread; import org.apache.hadoop.hbase.NotServingRegionException; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.RegionTooBusyException; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; @@ -158,7 +158,6 @@ import org.apache.hadoop.hbase.wal.FaultyFSLog; import org.apache.hadoop.hbase.wal.WAL; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALFactory; -import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.hbase.wal.WALKeyImpl; import org.apache.hadoop.hbase.wal.WALProvider; import org.apache.hadoop.hbase.wal.WALProvider.Writer; @@ -6282,20 +6281,20 @@ public class TestHRegion { .setRow(a) .setFamily(fam1) .setTimestamp(HConstants.LATEST_TIMESTAMP) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .build()), // this is outside the region boundary new Put(c).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) .setRow(c) .setFamily(fam1) .setTimestamp(HConstants.LATEST_TIMESTAMP) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .build()), new 
Put(b).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) .setRow(b) .setFamily(fam1) .setTimestamp(HConstants.LATEST_TIMESTAMP) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .build()) }; @@ -6331,13 +6330,13 @@ public class TestHRegion { .setRow(a) .setFamily(fam1) .setTimestamp(HConstants.LATEST_TIMESTAMP) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .build()), new Put(b).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) .setRow(b) .setFamily(fam1) .setTimestamp(HConstants.LATEST_TIMESTAMP) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .build()), }; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java index 39ed9dfa5b8..61958483921 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStore.java @@ -59,7 +59,6 @@ import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellBuilder; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellComparator; @@ -69,9 +68,9 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; -import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.MemoryCompactionPolicy; +import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; @@ -1049,13 +1048,13 @@ public 
class TestHStore { long seqId = 100; long timestamp = System.currentTimeMillis(); Cell cell0 = CellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(row).setFamily(family) - .setQualifier(qf1).setTimestamp(timestamp).setType(CellBuilder.DataType.Put) + .setQualifier(qf1).setTimestamp(timestamp).setType(Cell.DataType.Put) .setValue(qf1).build(); PrivateCellUtil.setSequenceId(cell0, seqId); testNumberOfMemStoreScannersAfterFlush(Arrays.asList(cell0), Collections.emptyList()); Cell cell1 = CellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(row).setFamily(family) - .setQualifier(qf2).setTimestamp(timestamp).setType(CellBuilder.DataType.Put) + .setQualifier(qf2).setTimestamp(timestamp).setType(Cell.DataType.Put) .setValue(qf1).build(); PrivateCellUtil.setSequenceId(cell1, seqId); testNumberOfMemStoreScannersAfterFlush(Arrays.asList(cell0), Arrays.asList(cell1)); @@ -1063,7 +1062,7 @@ public class TestHStore { seqId = 101; timestamp = System.currentTimeMillis(); Cell cell2 = CellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(row2).setFamily(family) - .setQualifier(qf2).setTimestamp(timestamp).setType(CellBuilder.DataType.Put) + .setQualifier(qf2).setTimestamp(timestamp).setType(Cell.DataType.Put) .setValue(qf1).build(); PrivateCellUtil.setSequenceId(cell2, seqId); testNumberOfMemStoreScannersAfterFlush(Arrays.asList(cell0), Arrays.asList(cell1, cell2)); @@ -1118,7 +1117,7 @@ public class TestHStore { private Cell createCell(byte[] row, byte[] qualifier, long ts, long sequenceId, byte[] value) throws IOException { Cell c = CellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(row).setFamily(family) - .setQualifier(qualifier).setTimestamp(ts).setType(CellBuilder.DataType.Put) + .setQualifier(qualifier).setTimestamp(ts).setType(Cell.DataType.Put) .setValue(value).build(); PrivateCellUtil.setSequenceId(c, sequenceId); return c; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java index e8d8b7e7ea1..d61f98e3c6a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java @@ -39,9 +39,9 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.ChoreService; import org.apache.hadoop.hbase.ClusterId; import org.apache.hadoop.hbase.CoordinatedStateManager; -import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.RawCellBuilder; import org.apache.hadoop.hbase.Server; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.ClusterConnection; @@ -331,7 +331,7 @@ public class TestTokenAuthentication { } @Override - public ExtendedCellBuilder getCellBuilder() { + public RawCellBuilder getCellBuilder() { return null; } }); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java index cfe415795fd..29ddfce402b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/ExpAsStringVisibilityLabelServiceImpl.java @@ -38,6 +38,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.AuthUtil; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilder; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; @@ -111,7 +112,7 @@ public class 
ExpAsStringVisibilityLabelServiceImpl implements VisibilityLabelSer .setFamily(LABELS_TABLE_FAMILY) .setQualifier(auth) .setTimestamp(p.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(DUMMY_VALUE) .build()); } diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java index 98e62eb2062..27850ef0215 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java @@ -38,10 +38,12 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; + import javax.security.auth.callback.Callback; import javax.security.auth.callback.UnsupportedCallbackException; import javax.security.sasl.AuthorizeCallback; import javax.security.sasl.SaslServer; + import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; import org.apache.commons.cli.OptionGroup; @@ -49,6 +51,7 @@ import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilder; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; @@ -1350,7 +1353,7 @@ public class ThriftServerRunner implements Runnable { .setFamily(famAndQf[0]) .setQualifier(famAndQf[1]) .setTimestamp(put.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(m.value != null ? 
getBytes(m.value) : HConstants.EMPTY_BYTE_ARRAY) .build()); @@ -1418,7 +1421,7 @@ public class ThriftServerRunner implements Runnable { .setFamily(famAndQf[0]) .setQualifier(famAndQf[1]) .setTimestamp(put.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(m.value != null ? getBytes(m.value) : HConstants.EMPTY_BYTE_ARRAY) .build()); @@ -1901,7 +1904,7 @@ public class ThriftServerRunner implements Runnable { .setFamily(famAndQf[0]) .setQualifier(famAndQf[1]) .setTimestamp(put.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(mput.value != null ? getBytes(mput.value) : HConstants.EMPTY_BYTE_ARRAY) .build()); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java index 59fe1f4e685..ced9a0284e9 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java @@ -28,7 +28,7 @@ import java.util.Map; import org.apache.commons.collections4.MapUtils; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellBuilder; +import org.apache.hadoop.hbase.Cell.DataType; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; @@ -227,7 +227,7 @@ public class ThriftUtilities { .setFamily(columnValue.getFamily()) .setQualifier(columnValue.getQualifier()) .setTimestamp(columnValue.getTimestamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(columnValue.getValue()) .build()); } else { @@ -236,7 +236,7 @@ public class ThriftUtilities { .setFamily(columnValue.getFamily()) .setQualifier(columnValue.getQualifier()) .setTimestamp(out.getTimeStamp()) - .setType(CellBuilder.DataType.Put) + .setType(DataType.Put) .setValue(columnValue.getValue()) .build()); }