From 0142c09217aa4f93cbf7410bbc8c2bce20ff772f Mon Sep 17 00:00:00 2001 From: Chia-Ping Tsai Date: Sun, 27 Aug 2017 20:12:28 +0800 Subject: [PATCH] HBASE-18519 Use builder pattern to create cell --- .../hadoop/hbase/protobuf/ProtobufUtil.java | 51 ++++-- .../hbase/shaded/protobuf/ProtobufUtil.java | 63 ++++--- .../org/apache/hadoop/hbase/CellBuilder.java | 52 ++++++ .../hadoop/hbase/CellBuilderFactory.java | 52 ++++++ .../apache/hadoop/hbase/CellBuilderType.java | 38 ++++ .../org/apache/hadoop/hbase/CellUtil.java | 79 ++++++--- .../hadoop/hbase/ExtendedCellBuilder.java | 67 +++++++ .../hbase/ExtendedCellBuilderFactory.java | 40 +++++ .../hadoop/hbase/ExtendedCellBuilderImpl.java | 163 ++++++++++++++++++ .../hbase/IndividualBytesFieldCell.java | 146 +++++++++++----- .../IndividualBytesFieldCellBuilder.java | 33 ++++ .../apache/hadoop/hbase/KeyValueBuilder.java | 35 ++++ .../apache/hadoop/hbase/codec/CellCodec.java | 15 +- .../hadoop/hbase/codec/CellCodecWithTags.java | 16 +- .../apache/hadoop/hbase/TestCellBuilder.java | 139 +++++++++++++++ .../hbase/TestIndividualBytesFieldCell.java | 73 +++++++- .../apache/hadoop/hbase/types/TestPBCell.java | 4 +- .../hadoop/hbase/codec/MessageCodec.java | 13 +- .../hadoop/hbase/regionserver/HMobStore.java | 22 +-- .../hadoop/hbase/regionserver/HRegion.java | 19 +- .../hbase/replication/BulkLoadCellFilter.java | 14 +- .../security/access/AccessController.java | 10 +- .../hbase/protobuf/TestProtobufUtil.java | 4 +- .../shaded/protobuf/TestProtobufUtil.java | 4 +- 24 files changed, 1002 insertions(+), 150 deletions(-) create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilder.java create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilderFactory.java create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilderType.java create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java create mode 100644 
hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCellBuilder.java create mode 100644 hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java create mode 100644 hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index 753b0d31ad6..81ec05cd160 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -31,9 +31,14 @@ import java.util.function.Function; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellBuilder; +import org.apache.hadoop.hbase.CellBuilderFactory; +import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCellBuilder; +import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.HConstants; @@ -491,6 +496,7 @@ public final class ProtobufUtil { throw new IllegalArgumentException("row cannot be null"); } // The proto has the metadata and the data itself + ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); for (ColumnValue column: proto.getColumnValueList()) { byte[] family = column.getFamily().toByteArray(); for (QualifierValue qv: column.getQualifierValueList()) { @@ -510,9 +516,14 @@ public final class 
ProtobufUtil { if (qv.hasTags()) { allTagsBytes = qv.getTags().toByteArray(); if(qv.hasDeleteType()) { - byte[] qual = qv.hasQualifier() ? qv.getQualifier().toByteArray() : null; - put.add(new KeyValue(proto.getRow().toByteArray(), family, qual, ts, - fromDeleteType(qv.getDeleteType()), null, allTagsBytes)); + put.add(cellBuilder.clear() + .setRow(proto.getRow().toByteArray()) + .setFamily(family) + .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null) + .setTimestamp(ts) + .setType(fromDeleteType(qv.getDeleteType()).getCode()) + .setTags(allTagsBytes) + .build()); } else { List tags = TagUtil.asList(allTagsBytes, 0, (short)allTagsBytes.length); Tag[] tagsArray = new Tag[tags.size()]; @@ -520,9 +531,13 @@ public final class ProtobufUtil { } } else { if(qv.hasDeleteType()) { - byte[] qual = qv.hasQualifier() ? qv.getQualifier().toByteArray() : null; - put.add(new KeyValue(proto.getRow().toByteArray(), family, qual, ts, - fromDeleteType(qv.getDeleteType()))); + put.add(cellBuilder.clear() + .setRow(proto.getRow().toByteArray()) + .setFamily(family) + .setQualifier(qv.hasQualifier() ? 
qv.getQualifier().toByteArray() : null) + .setTimestamp(ts) + .setType(fromDeleteType(qv.getDeleteType()).getCode()) + .build()); } else{ put.addImmutable(family, qualifier, ts, value); } @@ -1314,8 +1329,9 @@ public final class ProtobufUtil { } List cells = new ArrayList<>(values.size()); + CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); for (CellProtos.Cell c : values) { - cells.add(toCell(c)); + cells.add(toCell(builder, c)); } return Result.create(cells, null, proto.getStale(), proto.getPartial()); } @@ -1356,8 +1372,9 @@ public final class ProtobufUtil { if (!values.isEmpty()){ if (cells == null) cells = new ArrayList<>(values.size()); + CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); for (CellProtos.Cell c: values) { - cells.add(toCell(c)); + cells.add(toCell(builder, c)); } } @@ -1616,15 +1633,15 @@ public final class ProtobufUtil { return kvbuilder.build(); } - public static Cell toCell(final CellProtos.Cell cell) { - // Doing this is going to kill us if we do it for all data passed. 
- // St.Ack 20121205 - return CellUtil.createCell(cell.getRow().toByteArray(), - cell.getFamily().toByteArray(), - cell.getQualifier().toByteArray(), - cell.getTimestamp(), - (byte)cell.getCellType().getNumber(), - cell.getValue().toByteArray()); + public static Cell toCell(CellBuilder cellBuilder, final CellProtos.Cell cell) { + return cellBuilder.clear() + .setRow(cell.getRow().toByteArray()) + .setFamily(cell.getFamily().toByteArray()) + .setQualifier(cell.getQualifier().toByteArray()) + .setTimestamp(cell.getTimestamp()) + .setType((byte) cell.getCellType().getNumber()) + .setValue(cell.getValue().toByteArray()) + .build(); } /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java index b6006f0811f..800ebad2832 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java @@ -41,11 +41,16 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellBuilder; +import org.apache.hadoop.hbase.CellBuilderFactory; +import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ClusterId; import org.apache.hadoop.hbase.ClusterStatus; import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.ExtendedCellBuilder; +import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.HConstants; @@ -419,7 +424,6 @@ public final class ProtobufUtil { /** * Get a list of TableDescriptor from GetTableDescriptorsResponse protobuf - * * 
@param proto the GetTableDescriptorsResponse * @return a list of TableDescriptor */ @@ -616,6 +620,7 @@ public final class ProtobufUtil { throw new IllegalArgumentException("row cannot be null"); } // The proto has the metadata and the data itself + ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); for (ColumnValue column: proto.getColumnValueList()) { byte[] family = column.getFamily().toByteArray(); for (QualifierValue qv: column.getQualifierValueList()) { @@ -635,9 +640,14 @@ public final class ProtobufUtil { if (qv.hasTags()) { allTagsBytes = qv.getTags().toByteArray(); if(qv.hasDeleteType()) { - byte[] qual = qv.hasQualifier() ? qv.getQualifier().toByteArray() : null; - put.add(new KeyValue(proto.getRow().toByteArray(), family, qual, ts, - fromDeleteType(qv.getDeleteType()), null, allTagsBytes)); + put.add(cellBuilder.clear() + .setRow(proto.getRow().toByteArray()) + .setFamily(family) + .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null) + .setTimestamp(ts) + .setType(fromDeleteType(qv.getDeleteType()).getCode()) + .setTags(allTagsBytes) + .build()); } else { List tags = TagUtil.asList(allTagsBytes, 0, (short)allTagsBytes.length); Tag[] tagsArray = new Tag[tags.size()]; @@ -645,9 +655,13 @@ public final class ProtobufUtil { } } else { if(qv.hasDeleteType()) { - byte[] qual = qv.hasQualifier() ? qv.getQualifier().toByteArray() : null; - put.add(new KeyValue(proto.getRow().toByteArray(), family, qual, ts, - fromDeleteType(qv.getDeleteType()))); + put.add(cellBuilder.clear() + .setRow(proto.getRow().toByteArray()) + .setFamily(family) + .setQualifier(qv.hasQualifier() ? 
qv.getQualifier().toByteArray() : null) + .setTimestamp(ts) + .setType(fromDeleteType(qv.getDeleteType()).getCode()) + .build()); } else{ put.addImmutable(family, qualifier, ts, value); } @@ -787,8 +801,15 @@ public final class ProtobufUtil { if (qv.hasTags()) { tags = qv.getTags().toByteArray(); } - consumer.accept(mutation, CellUtil.createCell(mutation.getRow(), family, qualifier, qv.getTimestamp(), - KeyValue.Type.Put, value, tags)); + consumer.accept(mutation, ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) + .setRow(mutation.getRow()) + .setFamily(family) + .setQualifier(qualifier) + .setTimestamp(qv.getTimestamp()) + .setType(KeyValue.Type.Put.getCode()) + .setValue(value) + .setTags(tags) + .build()); } } } @@ -1461,8 +1482,9 @@ public final class ProtobufUtil { } List cells = new ArrayList<>(values.size()); + CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); for (CellProtos.Cell c : values) { - cells.add(toCell(c)); + cells.add(toCell(builder, c)); } return Result.create(cells, null, proto.getStale(), proto.getPartial()); } @@ -1503,8 +1525,9 @@ public final class ProtobufUtil { if (!values.isEmpty()){ if (cells == null) cells = new ArrayList<>(values.size()); + CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); for (CellProtos.Cell c: values) { - cells.add(toCell(c)); + cells.add(toCell(builder, c)); } } @@ -2009,15 +2032,15 @@ public final class ProtobufUtil { return UnsafeByteOperations.unsafeWrap(dup); } - public static Cell toCell(final CellProtos.Cell cell) { - // Doing this is going to kill us if we do it for all data passed. 
- // St.Ack 20121205 - return CellUtil.createCell(cell.getRow().toByteArray(), - cell.getFamily().toByteArray(), - cell.getQualifier().toByteArray(), - cell.getTimestamp(), - (byte)cell.getCellType().getNumber(), - cell.getValue().toByteArray()); + public static Cell toCell(CellBuilder cellBuilder, final CellProtos.Cell cell) { + return cellBuilder.clear() + .setRow(cell.getRow().toByteArray()) + .setFamily(cell.getFamily().toByteArray()) + .setQualifier(cell.getQualifier().toByteArray()) + .setTimestamp(cell.getTimestamp()) + .setType((byte) cell.getCellType().getNumber()) + .setValue(cell.getValue().toByteArray()) + .build(); } public static HBaseProtos.NamespaceDescriptor toProtoNamespaceDescriptor(NamespaceDescriptor ns) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilder.java new file mode 100644 index 00000000000..3b8717ccf52 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilder.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; + +/** + * Use {@link CellBuilderFactory} to get CellBuilder instance. + */ +@InterfaceAudience.Public +public interface CellBuilder { + + CellBuilder setRow(final byte[] row); + CellBuilder setRow(final byte[] row, final int rOffset, final int rLength); + + CellBuilder setFamily(final byte[] family); + CellBuilder setFamily(final byte[] family, final int fOffset, final int fLength); + + CellBuilder setQualifier(final byte[] qualifier); + CellBuilder setQualifier(final byte[] qualifier, final int qOffset, final int qLength); + + CellBuilder setTimestamp(final long timestamp); + + CellBuilder setType(final byte type); + + CellBuilder setValue(final byte[] value); + CellBuilder setValue(final byte[] value, final int vOffset, final int vLength); + + Cell build(); + + /** + * Remove all internal elements from builder. + * @return this + */ + CellBuilder clear(); +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilderFactory.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilderFactory.java new file mode 100644 index 00000000000..c7829a41335 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilderFactory.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; + +/** + * Create a CellBuilder instance. Currently, we have two kinds of Cell Builder. + * {@link CellBuilderType#DEEP_COPY} All bytes array passed into builder will be copied to build a new Cell. + * The cell impl is {@link org.apache.hadoop.hbase.KeyValue} + * {@link CellBuilderType#SHALLOW_COPY} Just copy the references of passed bytes array to build a new Cell + * The cell impl is {@link org.apache.hadoop.hbase.IndividualBytesFieldCell} + * NOTE: The cell impl may be changed in the future. The user application SHOULD NOT depend on any concrete cell impl. + */ +@InterfaceAudience.Public +public final class CellBuilderFactory { + + /** + * Create a CellBuilder instance. + * @param type indicates which memory copy is used in building cell.
+ * @return A new CellBuilder + */ + public static CellBuilder create(CellBuilderType type) { + switch (type) { + case SHALLOW_COPY: + return new IndividualBytesFieldCellBuilder(); + case DEEP_COPY: + return new KeyValueBuilder(); + default: + throw new UnsupportedOperationException("The type:" + type + " is unsupported"); + } + } + + private CellBuilderFactory(){ + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilderType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilderType.java new file mode 100644 index 00000000000..be8eef075b4 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellBuilderType.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; + +/** + * Used by {@link CellBuilderFactory} and {@link ExtendedCellBuilderFactory}. + * Indicates which memory copy is used in building cell. + */ +@InterfaceAudience.Public +public enum CellBuilderType { + /** + * The cell builder will copy all passed bytes for building cell.
+ */ + DEEP_COPY, + /** + * DON'T modify the byte array passed to cell builder + * because all fields in new cell are reference to input arguments + */ + SHALLOW_COPY +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index 03d8b705472..4a5023de31f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -316,73 +316,104 @@ public final class CellUtil { return buffer; } + /** + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. + * Use {@link CellBuilder} instead + */ + @Deprecated public static Cell createCell(final byte [] row, final byte [] family, final byte [] qualifier, final long timestamp, final byte type, final byte [] value) { - // I need a Cell Factory here. Using KeyValue for now. TODO. - // TODO: Make a new Cell implementation that just carries these - // byte arrays. - // TODO: Call factory to create Cell - return new KeyValue(row, family, qualifier, timestamp, KeyValue.Type.codeToType(type), value); + return CellBuilderFactory.create(CellBuilderType.DEEP_COPY) + .setRow(row) + .setFamily(family) + .setQualifier(qualifier) + .setTimestamp(timestamp) + .setType(type) + .setValue(value) + .build(); } + /** + * Creates a cell with deep copy of all passed bytes. + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. + * Use {@link CellBuilder} instead + */ + @Deprecated public static Cell createCell(final byte [] rowArray, final int rowOffset, final int rowLength, final byte [] familyArray, final int familyOffset, final int familyLength, final byte [] qualifierArray, final int qualifierOffset, final int qualifierLength) { // See createCell(final byte [] row, final byte [] value) for why we default Maximum type. 
- return new KeyValue(rowArray, rowOffset, rowLength, - familyArray, familyOffset, familyLength, - qualifierArray, qualifierOffset, qualifierLength, - HConstants.LATEST_TIMESTAMP, - KeyValue.Type.Maximum, - HConstants.EMPTY_BYTE_ARRAY, 0, HConstants.EMPTY_BYTE_ARRAY.length); + return CellBuilderFactory.create(CellBuilderType.DEEP_COPY) + .setRow(rowArray, rowOffset, rowLength) + .setFamily(familyArray, familyOffset, familyLength) + .setQualifier(qualifierArray, qualifierOffset, qualifierLength) + .setTimestamp(HConstants.LATEST_TIMESTAMP) + .setType(KeyValue.Type.Maximum.getCode()) + .setValue(HConstants.EMPTY_BYTE_ARRAY, 0, HConstants.EMPTY_BYTE_ARRAY.length) + .build(); } /** * Marked as audience Private as of 1.2.0. * Creating a Cell with a memstoreTS/mvcc is an internal implementation detail not for * public use. + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. + * Use {@link ExtendedCellBuilder} instead */ @InterfaceAudience.Private + @Deprecated public static Cell createCell(final byte[] row, final byte[] family, final byte[] qualifier, final long timestamp, final byte type, final byte[] value, final long memstoreTS) { - KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, - KeyValue.Type.codeToType(type), value); - keyValue.setSequenceId(memstoreTS); - return keyValue; + return createCell(row, family, qualifier, timestamp, type, value, null, memstoreTS); } /** * Marked as audience Private as of 1.2.0. * Creating a Cell with tags and a memstoreTS/mvcc is an internal implementation detail not for * public use. + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. 
+ * Use {@link ExtendedCellBuilder} instead */ @InterfaceAudience.Private + @Deprecated public static Cell createCell(final byte[] row, final byte[] family, final byte[] qualifier, final long timestamp, final byte type, final byte[] value, byte[] tags, final long memstoreTS) { - KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, - KeyValue.Type.codeToType(type), value, tags); - keyValue.setSequenceId(memstoreTS); - return keyValue; + return ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY) + .setRow(row) + .setFamily(family) + .setQualifier(qualifier) + .setTimestamp(timestamp) + .setType(type) + .setValue(value) + .setTags(tags) + .setSequenceId(memstoreTS) + .build(); } /** * Marked as audience Private as of 1.2.0. * Creating a Cell with tags is an internal implementation detail not for * public use. + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. + * Use {@link ExtendedCellBuilder} instead */ @InterfaceAudience.Private + @Deprecated public static Cell createCell(final byte[] row, final byte[] family, final byte[] qualifier, final long timestamp, Type type, final byte[] value, byte[] tags) { - KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, type, value, tags); - return keyValue; + return createCell(row, family, qualifier, timestamp, type.getCode(), value, + tags, 0); } /** * Create a Cell with specific row. Other fields defaulted. * @param row * @return Cell with passed row but all other fields are arbitrary + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. + * Use {@link CellBuilder} instead */ + @Deprecated public static Cell createCell(final byte [] row) { return createCell(row, HConstants.EMPTY_BYTE_ARRAY); } @@ -392,7 +423,10 @@ public final class CellUtil { * @param row * @param value * @return Cell with passed row and value but all other fields are arbitrary + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. 
+ * Use {@link CellBuilder} instead */ + @Deprecated public static Cell createCell(final byte [] row, final byte [] value) { // An empty family + empty qualifier + Type.Minimum is used as flag to indicate last on row. // See the CellComparator and KeyValue comparator. Search for compareWithoutRow. @@ -408,7 +442,10 @@ public final class CellUtil { * @param family * @param qualifier * @return Cell with passed row but all other fields are arbitrary + * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. + * Use {@link CellBuilder} instead */ + @Deprecated public static Cell createCell(final byte [] row, final byte [] family, final byte [] qualifier) { // See above in createCell(final byte [] row, final byte [] value) why we set type to Maximum. return createCell(row, family, qualifier, diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java new file mode 100644 index 00000000000..a378ab8c3e1 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilder.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; + +/** + * For internal purpose. + * {@link Tag} and memstoreTS/mvcc are internal implementation detail + * that should not be exposed publicly. + * Use {@link ExtendedCellBuilderFactory} to get ExtendedCellBuilder instance. + * TODO: ditto for ByteBufferCell? + */ +@InterfaceAudience.Private +public interface ExtendedCellBuilder extends CellBuilder { + @Override + ExtendedCellBuilder setRow(final byte[] row); + @Override + ExtendedCellBuilder setRow(final byte[] row, final int rOffset, final int rLength); + + @Override + ExtendedCellBuilder setFamily(final byte[] family); + @Override + ExtendedCellBuilder setFamily(final byte[] family, final int fOffset, final int fLength); + + @Override + ExtendedCellBuilder setQualifier(final byte[] qualifier); + @Override + ExtendedCellBuilder setQualifier(final byte[] qualifier, final int qOffset, final int qLength); + + @Override + ExtendedCellBuilder setTimestamp(final long timestamp); + + @Override + ExtendedCellBuilder setType(final byte type); + + @Override + ExtendedCellBuilder setValue(final byte[] value); + @Override + ExtendedCellBuilder setValue(final byte[] value, final int vOffset, final int vLength); + + @Override + ExtendedCell build(); + + @Override + ExtendedCellBuilder clear(); + + ExtendedCellBuilder setTags(final byte[] tags); + ExtendedCellBuilder setTags(final byte[] tags, int tagsOffset, int tagsLength); + + ExtendedCellBuilder setSequenceId(final long seqId); +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java new file mode 100644 index 00000000000..87770fac4b9 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderFactory.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor 
license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; + + +@InterfaceAudience.Private +public final class ExtendedCellBuilderFactory { + + public static ExtendedCellBuilder create(CellBuilderType type) { + switch (type) { + case SHALLOW_COPY: + return new IndividualBytesFieldCellBuilder(); + case DEEP_COPY: + return new KeyValueBuilder(); + default: + throw new UnsupportedOperationException("The type:" + type + " is unsupported"); + } + } + + private ExtendedCellBuilderFactory(){ + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java new file mode 100644 index 00000000000..cd1a33122a1 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ExtendedCellBuilderImpl.java @@ -0,0 +1,163 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.util.ArrayUtils; + +@InterfaceAudience.Private +public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder { + protected byte[] row = null; + protected int rOffset = 0; + protected int rLength = 0; + protected byte[] family = null; + protected int fOffset = 0; + protected int fLength = 0; + protected byte[] qualifier = null; + protected int qOffset = 0; + protected int qLength = 0; + protected long timestamp = HConstants.LATEST_TIMESTAMP; + protected Byte type = null; + protected byte[] value = null; + protected int vOffset = 0; + protected int vLength = 0; + protected long seqId = 0; + protected byte[] tags = null; + protected int tagsOffset = 0; + protected int tagsLength = 0; + + @Override + public ExtendedCellBuilder setRow(final byte[] row) { + return setRow(row, 0, ArrayUtils.length(row)); + } + + @Override + public ExtendedCellBuilder setRow(final byte[] row, int rOffset, int rLength) { + this.row = row; + this.rOffset = rOffset; + this.rLength = rLength; + return this; + } + + @Override + public ExtendedCellBuilder setFamily(final byte[] family) { + return setFamily(family, 0, ArrayUtils.length(family)); + } + + @Override + public ExtendedCellBuilder setFamily(final byte[] family, int fOffset, int fLength) { + this.family = 
family; + this.fOffset = fOffset; + this.fLength = fLength; + return this; + } + + @Override + public ExtendedCellBuilder setQualifier(final byte[] qualifier) { + return setQualifier(qualifier, 0, ArrayUtils.length(qualifier)); + } + + @Override + public ExtendedCellBuilder setQualifier(final byte[] qualifier, int qOffset, int qLength) { + this.qualifier = qualifier; + this.qOffset = qOffset; + this.qLength = qLength; + return this; + } + + @Override + public ExtendedCellBuilder setTimestamp(final long timestamp) { + this.timestamp = timestamp; + return this; + } + + @Override + public ExtendedCellBuilder setType(final byte type) { + this.type = type; + return this; + } + + @Override + public ExtendedCellBuilder setValue(final byte[] value) { + return setValue(value, 0, ArrayUtils.length(value)); + } + + @Override + public ExtendedCellBuilder setValue(final byte[] value, int vOffset, int vLength) { + this.value = value; + this.vOffset = vOffset; + this.vLength = vLength; + return this; + } + + @Override + public ExtendedCellBuilder setTags(final byte[] tags) { + return setTags(tags, 0, ArrayUtils.length(tags)); + } + + @Override + public ExtendedCellBuilder setTags(final byte[] tags, int tagsOffset, int tagsLength) { + this.tags = tags; + this.tagsOffset = tagsOffset; + this.tagsLength = tagsLength; + return this; + } + + @Override + public ExtendedCellBuilder setSequenceId(final long seqId) { + this.seqId = seqId; + return this; + } + + private void checkBeforeBuild() { + if (type == null) { + throw new IllegalArgumentException("The type can't be NULL"); + } + } + + protected abstract ExtendedCell innerBuild(); + + @Override + public ExtendedCell build() { + checkBeforeBuild(); + return innerBuild(); + } + + @Override + public ExtendedCellBuilder clear() { + row = null; + rOffset = 0; + rLength = 0; + family = null; + fOffset = 0; + fLength = 0; + qualifier = null; + qOffset = 0; + qLength = 0; + timestamp = HConstants.LATEST_TIMESTAMP; + type = null; + value = 
null; + vOffset = 0; + vLength = 0; + seqId = 0; + tags = null; + tagsOffset = 0; + tagsLength = 0; + return this; + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java index 218a5313391..882152a64a3 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCell.java @@ -18,16 +18,16 @@ package org.apache.hadoop.hbase; -import org.apache.hadoop.hbase.classification.InterfaceAudience; - -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.ClassSize; -import org.apache.hadoop.hbase.util.ByteBufferUtils; - import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; +import org.apache.hadoop.hbase.classification.InterfaceAudience; +import org.apache.hadoop.hbase.util.ArrayUtils; +import org.apache.hadoop.hbase.util.ByteBufferUtils; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.ClassSize; + @InterfaceAudience.Private public class IndividualBytesFieldCell implements ExtendedCell { @@ -38,16 +38,26 @@ public class IndividualBytesFieldCell implements ExtendedCell { + 5 * ClassSize.REFERENCE); // references to all byte arrays: row, family, qualifier, value, tags // The following fields are backed by individual byte arrays - private byte[] row; - private byte[] family; - private byte[] qualifier; - private byte[] value; - private byte[] tags; // A byte array, rather than an array of org.apache.hadoop.hbase.Tag + private final byte[] row; + private final int rOffset; + private final int rLength; + private final byte[] family; + private final int fOffset; + private final int fLength; + private final byte[] qualifier; + private final int qOffset; + private final int qLength; + private final byte[] value; + private final int vOffset; + private final int 
vLength; + private final byte[] tags; // A byte array, rather than an array of org.apache.hadoop.hbase.Tag + private final int tagsOffset; + private final int tagsLength; // Other fields - private long timestamp; - private byte type; // A byte, rather than org.apache.hadoop.hbase.KeyValue.Type - private long seqId; + private long timestamp; + private final byte type; // A byte, rather than org.apache.hadoop.hbase.KeyValue.Type + private long seqId; public IndividualBytesFieldCell(byte[] row, byte[] family, byte[] qualifier, long timestamp, KeyValue.Type type, byte[] value) { @@ -56,12 +66,26 @@ public class IndividualBytesFieldCell implements ExtendedCell { public IndividualBytesFieldCell(byte[] row, byte[] family, byte[] qualifier, long timestamp, KeyValue.Type type, long seqId, byte[] value, byte[] tags) { + this(row, 0, ArrayUtils.length(row), + family, 0, ArrayUtils.length(family), + qualifier, 0, ArrayUtils.length(qualifier), + timestamp, type, seqId, + value, 0, ArrayUtils.length(value), + tags, 0, ArrayUtils.length(tags)); + } + + public IndividualBytesFieldCell(byte[] row, int rOffset, int rLength, + byte[] family, int fOffset, int fLength, + byte[] qualifier, int qOffset, int qLength, + long timestamp, KeyValue.Type type, long seqId, + byte[] value, int vOffset, int vLength, + byte[] tags, int tagsOffset, int tagsLength) { // Check row, family, qualifier and value - KeyValue.checkParameters(row, (row == null) ? 0 : row.length, // row and row length - family, (family == null) ? 0 : family.length, // family and family length - (qualifier == null) ? 0 : qualifier.length, // qualifier length - (value == null) ? 
0 : value.length); // value length + KeyValue.checkParameters(row, rLength, // row and row length + family, fLength, // family and family length + qLength, // qualifier length + vLength); // value length // Check timestamp if (timestamp < 0) { @@ -69,19 +93,46 @@ public class IndividualBytesFieldCell implements ExtendedCell { } // Check tags - TagUtil.checkForTagsLength((tags == null) ? 0 : tags.length); - + TagUtil.checkForTagsLength(tagsLength); + checkArrayBounds(row, rOffset, rLength); + checkArrayBounds(family, fOffset, fLength); + checkArrayBounds(qualifier, qOffset, qLength); + checkArrayBounds(value, vOffset, vLength); + checkArrayBounds(tags, tagsOffset, tagsLength); // No local copy is made, but reference to the input directly - this.row = row; - this.family = family; - this.qualifier = qualifier; - this.value = value; - this.tags = tags; + this.row = row; + this.rOffset = rOffset; + this.rLength = rLength; + this.family = family; + this.fOffset = fOffset; + this.fLength = fLength; + this.qualifier = qualifier; + this.qOffset = qOffset; + this.qLength = qLength; + this.value = value; + this.vOffset = vOffset; + this.vLength = vLength; + this.tags = tags; + this.tagsOffset = tagsOffset; + this.tagsLength = tagsLength; // Set others - this.timestamp = timestamp; - this.type = type.getCode(); - this.seqId = seqId; + this.timestamp = timestamp; + this.type = type.getCode(); + this.seqId = seqId; + } + + private void checkArrayBounds(byte[] bytes, int offset, int length) { + if (offset < 0 || length < 0) { + throw new IllegalArgumentException("Negative number! offset=" + offset + "and length=" + length); + } + if (bytes == null && (offset != 0 || length != 0)) { + throw new IllegalArgumentException("Null bytes array but offset=" + offset + "and length=" + length); + } + if (bytes != null && bytes.length < offset + length) { + throw new IllegalArgumentException("Out of bounds! 
bytes.length=" + bytes.length + + ", offset=" + offset + ", length=" + length); + } } @Override @@ -99,8 +150,8 @@ public class IndividualBytesFieldCell implements ExtendedCell { // Tags length and tags byte array if (withTags && getTagsLength() > 0) { // Tags length - out.write((byte)(0xff & (tags.length >> 8))); - out.write((byte)(0xff & tags.length)); + out.write((byte)(0xff & (getTagsLength() >> 8))); + out.write((byte)(0xff & getTagsLength())); // Tags byte array out.write(tags); @@ -148,14 +199,14 @@ public class IndividualBytesFieldCell implements ExtendedCell { @Override public int getRowOffset() { - return 0; + return rOffset; } @Override public short getRowLength() { - // If row is null or row.length is invalid, the constructor will reject it, by {@link KeyValue#checkParameters()}, - // so it is safe to call row.length and make the type conversion. - return (short)(row.length); + // If row is null or rLength is invalid, the constructor will reject it, by {@link KeyValue#checkParameters()}, + // so it is safe to call rLength and make the type conversion. + return (short)(rLength); } // 2) Family @@ -167,15 +218,14 @@ public class IndividualBytesFieldCell implements ExtendedCell { @Override public int getFamilyOffset() { - return 0; + return fOffset; } @Override public byte getFamilyLength() { - // If family.length is invalid, the constructor will reject it, by {@link KeyValue#checkParameters()}, + // If fLength is invalid, the constructor will reject it, by {@link KeyValue#checkParameters()}, // so it is safe to make the type conversion. - // But need to consider the condition when family is null. - return (family == null) ? 0 : (byte)(family.length); + return (byte)(fLength); } // 3) Qualifier @@ -187,13 +237,12 @@ public class IndividualBytesFieldCell implements ExtendedCell { @Override public int getQualifierOffset() { - return 0; + return qOffset; } @Override public int getQualifierLength() { - // Qualifier could be null - return (qualifier == null) ? 
0 : qualifier.length; + return qLength; } // 4) Timestamp @@ -223,13 +272,12 @@ public class IndividualBytesFieldCell implements ExtendedCell { @Override public int getValueOffset() { - return 0; + return vOffset; } @Override public int getValueLength() { - // Value could be null - return (value == null) ? 0 : value.length; + return vLength; } // 8) Tags @@ -241,13 +289,12 @@ public class IndividualBytesFieldCell implements ExtendedCell { @Override public int getTagsOffset() { - return 0; + return tagsOffset; } @Override public int getTagsLength() { - // Tags could be null - return (tags == null) ? 0 : tags.length; + return tagsLength; } /** @@ -298,4 +345,9 @@ public class IndividualBytesFieldCell implements ExtendedCell { public void setTimestamp(byte[] ts, int tsOffset) { setTimestamp(Bytes.toLong(ts, tsOffset)); } + + @Override + public String toString() { + return CellUtil.toString(this, true); + } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCellBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCellBuilder.java new file mode 100644 index 00000000000..482fea3759e --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/IndividualBytesFieldCellBuilder.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; + +@InterfaceAudience.Private +class IndividualBytesFieldCellBuilder extends ExtendedCellBuilderImpl { + @Override + public ExtendedCell innerBuild() { + return new IndividualBytesFieldCell(row, rOffset, rLength, + family, fOffset, fLength, + qualifier, qOffset, qLength, + timestamp, KeyValue.Type.codeToType(type), seqId, + value, vOffset, vLength, + tags, tagsOffset, tagsLength); + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java new file mode 100644 index 00000000000..7749cd0d1ad --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueBuilder.java @@ -0,0 +1,35 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase; + +import org.apache.hadoop.hbase.classification.InterfaceAudience; + +@InterfaceAudience.Private +class KeyValueBuilder extends ExtendedCellBuilderImpl { + @Override + protected ExtendedCell innerBuild() { + KeyValue kv = new KeyValue(row, rOffset, rLength, + family, fOffset, fLength, + qualifier, qOffset, qLength, + timestamp, KeyValue.Type.codeToType(type), + value, vOffset, vLength, + tags, tagsOffset, tagsLength); + kv.setSequenceId(seqId); + return kv; + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java index ca2e3e8032f..2be7e5d8586 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodec.java @@ -23,7 +23,9 @@ import java.io.OutputStream; import org.apache.commons.io.IOUtils; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.CellBuilderType; +import org.apache.hadoop.hbase.ExtendedCellBuilder; +import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.ByteBuffInputStream; @@ -77,6 +79,7 @@ public class CellCodec implements Codec { } static class CellDecoder extends BaseDecoder { + private final ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); public CellDecoder(final InputStream in) { super(in); } @@ -95,7 +98,15 @@ public class CellCodec implements Codec { byte[] memstoreTSArray = new byte[Bytes.SIZEOF_LONG]; IOUtils.readFully(this.in, memstoreTSArray); long memstoreTS = Bytes.toLong(memstoreTSArray); - return CellUtil.createCell(row, family, qualifier, timestamp, type, value, memstoreTS); + return cellBuilder.clear() + .setRow(row) + 
.setFamily(family) + .setQualifier(qualifier) + .setTimestamp(timestamp) + .setType(type) + .setValue(value) + .setSequenceId(memstoreTS) + .build(); } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java index 2dca10a515d..485bf87ead2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java @@ -23,7 +23,9 @@ import java.io.OutputStream; import org.apache.commons.io.IOUtils; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.CellBuilderType; +import org.apache.hadoop.hbase.ExtendedCellBuilder; +import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.ByteBuffInputStream; @@ -78,6 +80,7 @@ public class CellCodecWithTags implements Codec { } static class CellDecoder extends BaseDecoder { + private final ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); public CellDecoder(final InputStream in) { super(in); } @@ -96,7 +99,16 @@ public class CellCodecWithTags implements Codec { byte[] memstoreTSArray = new byte[Bytes.SIZEOF_LONG]; IOUtils.readFully(this.in, memstoreTSArray); long memstoreTS = Bytes.toLong(memstoreTSArray); - return CellUtil.createCell(row, family, qualifier, timestamp, type, value, tags, memstoreTS); + return cellBuilder.clear() + .setRow(row) + .setFamily(family) + .setQualifier(qualifier) + .setTimestamp(timestamp) + .setType(type) + .setValue(value) + .setSequenceId(memstoreTS) + .setTags(tags) + .build(); } /** diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java new file mode 100644 index 00000000000..fc8814eb4e5 --- /dev/null +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellBuilder.java @@ -0,0 +1,139 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase; + +import org.apache.hadoop.hbase.testclassification.MiscTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import static org.junit.Assert.assertEquals; + +@Category({MiscTests.class, SmallTests.class}) +public class TestCellBuilder { + + private static final byte OLD_DATA = 87; + private static final byte NEW_DATA = 100; + + @Test + public void testCellBuilderWithDeepCopy() { + byte[] row = new byte[]{OLD_DATA}; + byte[] family = new byte[]{OLD_DATA}; + byte[] qualifier = new byte[]{OLD_DATA}; + byte[] value = new byte[]{OLD_DATA}; + Cell cell = CellBuilderFactory.create(CellBuilderType.DEEP_COPY) + .setRow(row) + .setFamily(family) + .setQualifier(qualifier) + .setType(KeyValue.Type.Put.getCode()) + .setValue(value) + .build(); + row[0] = NEW_DATA; + family[0] = NEW_DATA; + qualifier[0] = NEW_DATA; + value[0] = NEW_DATA; + assertEquals(OLD_DATA, cell.getRowArray()[cell.getRowOffset()]); + assertEquals(OLD_DATA, cell.getFamilyArray()[cell.getFamilyOffset()]); + assertEquals(OLD_DATA, cell.getQualifierArray()[cell.getQualifierOffset()]); + assertEquals(OLD_DATA, cell.getValueArray()[cell.getValueOffset()]); + } + + @Test + public void testCellBuilderWithShallowCopy() { + byte[] row = new byte[]{OLD_DATA}; + byte[] family = new byte[]{OLD_DATA}; + byte[] qualifier = new byte[]{OLD_DATA}; + byte[] value = new byte[]{OLD_DATA}; + Cell cell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) + .setRow(row) + .setFamily(family) + .setQualifier(qualifier) + .setType(KeyValue.Type.Put.getCode()) + .setValue(value) + .build(); + row[0] = NEW_DATA; + family[0] = NEW_DATA; + qualifier[0] = NEW_DATA; + value[0] = NEW_DATA; + assertEquals(NEW_DATA, cell.getRowArray()[cell.getRowOffset()]); + assertEquals(NEW_DATA, cell.getFamilyArray()[cell.getFamilyOffset()]); + assertEquals(NEW_DATA, 
cell.getQualifierArray()[cell.getQualifierOffset()]); + assertEquals(NEW_DATA, cell.getValueArray()[cell.getValueOffset()]); + } + + @Test + public void testExtendedCellBuilderWithShallowCopy() { + byte[] row = new byte[]{OLD_DATA}; + byte[] family = new byte[]{OLD_DATA}; + byte[] qualifier = new byte[]{OLD_DATA}; + byte[] value = new byte[]{OLD_DATA}; + byte[] tags = new byte[]{OLD_DATA}; + long seqId = 999; + Cell cell = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) + .setRow(row) + .setFamily(family) + .setQualifier(qualifier) + .setType(KeyValue.Type.Put.getCode()) + .setValue(value) + .setTags(tags) + .setSequenceId(seqId) + .build(); + row[0] = NEW_DATA; + family[0] = NEW_DATA; + qualifier[0] = NEW_DATA; + value[0] = NEW_DATA; + tags[0] = NEW_DATA; + assertEquals(NEW_DATA, cell.getRowArray()[cell.getRowOffset()]); + assertEquals(NEW_DATA, cell.getFamilyArray()[cell.getFamilyOffset()]); + assertEquals(NEW_DATA, cell.getQualifierArray()[cell.getQualifierOffset()]); + assertEquals(NEW_DATA, cell.getValueArray()[cell.getValueOffset()]); + assertEquals(NEW_DATA, cell.getTagsArray()[cell.getTagsOffset()]); + assertEquals(seqId, cell.getSequenceId()); + } + + @Test + public void testExtendedCellBuilderWithDeepCopy() { + byte[] row = new byte[]{OLD_DATA}; + byte[] family = new byte[]{OLD_DATA}; + byte[] qualifier = new byte[]{OLD_DATA}; + byte[] value = new byte[]{OLD_DATA}; + byte[] tags = new byte[]{OLD_DATA}; + long seqId = 999; + Cell cell = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY) + .setRow(row) + .setFamily(family) + .setQualifier(qualifier) + .setType(KeyValue.Type.Put.getCode()) + .setValue(value) + .setTags(tags) + .setSequenceId(seqId) + .build(); + row[0] = NEW_DATA; + family[0] = NEW_DATA; + qualifier[0] = NEW_DATA; + value[0] = NEW_DATA; + tags[0] = NEW_DATA; + assertEquals(OLD_DATA, cell.getRowArray()[cell.getRowOffset()]); + assertEquals(OLD_DATA, cell.getFamilyArray()[cell.getFamilyOffset()]); + 
assertEquals(OLD_DATA, cell.getQualifierArray()[cell.getQualifierOffset()]); + assertEquals(OLD_DATA, cell.getValueArray()[cell.getValueOffset()]); + assertEquals(OLD_DATA, cell.getTagsArray()[cell.getTagsOffset()]); + assertEquals(seqId, cell.getSequenceId()); + } +} diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java index 1f99f59e081..49eb2b74df1 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestIndividualBytesFieldCell.java @@ -20,21 +20,18 @@ package org.apache.hadoop.hbase; import java.io.IOException; import java.nio.ByteBuffer; + import org.apache.hadoop.hbase.io.ByteArrayOutputStream; - -import org.apache.hadoop.hbase.util.Bytes; -import static org.apache.hadoop.hbase.KeyValue.Type; - import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; - +import org.apache.hadoop.hbase.util.Bytes; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; @Category({MiscTests.class, SmallTests.class}) public class TestIndividualBytesFieldCell { @@ -53,7 +50,7 @@ public class TestIndividualBytesFieldCell { // Other inputs long timestamp = 5000L; long seqId = 0L; - Type type = KeyValue.Type.Put; + KeyValue.Type type = KeyValue.Type.Put; ic0 = new IndividualBytesFieldCell(row, family, qualifier, timestamp, type, seqId, value, tags); kv0 = new KeyValue(row, family, qualifier, timestamp, type, value, tags); @@ -142,7 +139,7 @@ public class TestIndividualBytesFieldCell { long timestamp = 5000L; long seqId = 0L; - Type type = KeyValue.Type.Put; + 
KeyValue.Type type = KeyValue.Type.Put; // Test when following fields are null. byte[] family = null; @@ -182,4 +179,64 @@ public class TestIndividualBytesFieldCell { public void testIfSettableTimestampImplemented() { assertTrue(ic0 instanceof SettableTimestamp); } + + @Test(expected = IllegalArgumentException.class) + public void testIllegalRow() { + new IndividualBytesFieldCell(Bytes.toBytes("row"), 0, 100, + HConstants.EMPTY_BYTE_ARRAY, 0, 0, + HConstants.EMPTY_BYTE_ARRAY, 0, 0, + 0L, KeyValue.Type.Put, 0, + HConstants.EMPTY_BYTE_ARRAY, 0, 0, + HConstants.EMPTY_BYTE_ARRAY, 0, 0); + } + + @Test(expected = IllegalArgumentException.class) + public void testIllegalFamily() { + new IndividualBytesFieldCell(Bytes.toBytes("row"), 0, 3, + Bytes.toBytes("family"), 0, 100, + HConstants.EMPTY_BYTE_ARRAY, 0, 0, + 0L, KeyValue.Type.Put, 0, + HConstants.EMPTY_BYTE_ARRAY, 0, 0, + HConstants.EMPTY_BYTE_ARRAY, 0, 0); + } + + @Test(expected = IllegalArgumentException.class) + public void testIllegalQualifier() { + new IndividualBytesFieldCell(Bytes.toBytes("row"), 0, 3, + Bytes.toBytes("family"), 0, 6, + Bytes.toBytes("qualifier"), 0, 100, + 0L, KeyValue.Type.Put, 0, + HConstants.EMPTY_BYTE_ARRAY, 0, 0, + HConstants.EMPTY_BYTE_ARRAY, 0, 0); + } + + @Test(expected = IllegalArgumentException.class) + public void testIllegalTimestamp() { + new IndividualBytesFieldCell(Bytes.toBytes("row"), 0, 3, + Bytes.toBytes("family"), 0, 6, + Bytes.toBytes("qualifier"), 0, 9, + -100, KeyValue.Type.Put, 0, + HConstants.EMPTY_BYTE_ARRAY, 0, 0, + HConstants.EMPTY_BYTE_ARRAY, 0, 0); + } + + @Test(expected = IllegalArgumentException.class) + public void testIllegalValue() { + new IndividualBytesFieldCell(Bytes.toBytes("row"), 0, 3, + Bytes.toBytes("family"), 0, 6, + Bytes.toBytes("qualifier"), 0, 9, + 0L, KeyValue.Type.Put, 0, + Bytes.toBytes("value"), 0, 100, + HConstants.EMPTY_BYTE_ARRAY, 0, 0); + } + + @Test(expected = IllegalArgumentException.class) + public void testIllegalTags() { + new 
IndividualBytesFieldCell(Bytes.toBytes("row"), 0, 3, + Bytes.toBytes("family"), 0, 6, + Bytes.toBytes("qualifier"), 0, 9, + 0L, KeyValue.Type.Put, 0, + Bytes.toBytes("value"), 0, 5, + Bytes.toBytes("tags"), 0, 100); + } } diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java index 3f1b9710fe9..539f472a034 100644 --- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java +++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/types/TestPBCell.java @@ -21,6 +21,8 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellBuilderFactory; +import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.MiscTests; @@ -52,6 +54,6 @@ public class TestPBCell { pbr.setPosition(0); decoded = CODEC.decode(pbr); assertEquals(encodedLength, pbr.getPosition()); - assertTrue(CellUtil.equals(cell, ProtobufUtil.toCell(decoded))); + assertTrue(CellUtil.equals(cell, ProtobufUtil.toCell(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), decoded))); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java index 41dc38777e0..44a2713d760 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java @@ -21,13 +21,16 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import org.apache.hadoop.hbase.CellBuilderType; +import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import 
org.apache.hadoop.hbase.io.ByteBuffInputStream; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.ExtendedCellBuilder; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos; /** @@ -64,16 +67,14 @@ public class MessageCodec implements Codec { } static class MessageDecoder extends BaseDecoder { + private final ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); MessageDecoder(final InputStream in) { super(in); } + @Override protected Cell parseCell() throws IOException { - CellProtos.Cell pbcell = CellProtos.Cell.parseDelimitedFrom(this.in); - return CellUtil.createCell(pbcell.getRow().toByteArray(), - pbcell.getFamily().toByteArray(), pbcell.getQualifier().toByteArray(), - pbcell.getTimestamp(), (byte)pbcell.getCellType().getNumber(), - pbcell.getValue().toByteArray()); + return ProtobufUtil.toCell(cellBuilder, CellProtos.Cell.parseDelimitedFrom(this.in)); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java index f38ffb5c1fa..cbd3726ee4d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HMobStore.java @@ -34,11 +34,11 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ArrayBackedTag; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.DoNotRetryIOException; +import 
org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; @@ -358,15 +358,17 @@ public class HMobStore extends HStore { } } if (result == null) { - LOG.warn("The KeyValue result is null, assemble a new KeyValue with the same row,family," + LOG.warn("The Cell result is null, assemble a new Cell with the same row,family," + "qualifier,timestamp,type and tags but with an empty value to return."); - result = new KeyValue(reference.getRowArray(), reference.getRowOffset(), - reference.getRowLength(), reference.getFamilyArray(), reference.getFamilyOffset(), - reference.getFamilyLength(), reference.getQualifierArray(), - reference.getQualifierOffset(), reference.getQualifierLength(), reference.getTimestamp(), - Type.codeToType(reference.getTypeByte()), HConstants.EMPTY_BYTE_ARRAY, - 0, 0, reference.getTagsArray(), reference.getTagsOffset(), - reference.getTagsLength()); + result = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY) + .setRow(reference.getRowArray(), reference.getRowOffset(), reference.getRowLength()) + .setFamily(reference.getFamilyArray(), reference.getFamilyOffset(), reference.getFamilyLength()) + .setQualifier(reference.getQualifierArray(), reference.getQualifierOffset(), reference.getQualifierLength()) + .setTimestamp(reference.getTimestamp()) + .setType(reference.getTypeByte()) + .setValue(HConstants.EMPTY_BYTE_ARRAY) + .setTags(reference.getTagsArray(), reference.getTagsOffset(), reference.getTagsLength()) + .build(); } return result; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index bc4baaf80b3..4602b0155b4 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -123,11 +123,13 @@ import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.conf.ConfigurationManager; import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver; import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType; +import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare; import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException; import org.apache.hadoop.hbase.exceptions.RegionInRecoveryException; import org.apache.hadoop.hbase.exceptions.TimeoutIOException; import org.apache.hadoop.hbase.exceptions.UnknownProtocolException; +import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.FilterWrapper; @@ -7546,13 +7548,16 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi if (currentCell != null) { tags = TagUtil.carryForwardTags(tags, currentCell); byte[] newValue = supplier.apply(currentCell); - // TODO: FIX. This is carnel knowledge of how KeyValues are made... - // This will be fixed by HBASE-18519 - return new KeyValue(mutation.getRow(), 0, mutation.getRow().length, - columnFamily, 0, columnFamily.length, - delta.getQualifierArray(), delta.getQualifierOffset(), delta.getQualifierLength(), - Math.max(currentCell.getTimestamp() + 1, now), - KeyValue.Type.Put, newValue, 0, newValue.length, tags); + return ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY) + .setRow(mutation.getRow(), 0, mutation.getRow().length) + .setFamily(columnFamily, 0, columnFamily.length) + // copy the qualifier if the cell is located in shared memory. 
+ .setQualifier(CellUtil.cloneQualifier(delta)) + .setTimestamp(Math.max(currentCell.getTimestamp() + 1, now)) + .setType(KeyValue.Type.Put.getCode()) + .setValue(newValue, 0, newValue.length) + .setTags(TagUtil.fromList(tags)) + .build(); } else { CellUtil.updateLatestStamp(delta, now); return CollectionUtils.isEmpty(tags) ? delta : CellUtil.createCell(delta, tags); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java index e832d6bd566..b8773f8d965 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java @@ -25,6 +25,9 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellBuilder; +import org.apache.hadoop.hbase.CellBuilderFactory; +import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor; @@ -35,6 +38,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; public class BulkLoadCellFilter { private static final Log LOG = LogFactory.getLog(BulkLoadCellFilter.class); + private final CellBuilder cellBuilder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); /** * Filters the bulk load cell using the supplied predicate. * @param cell The WAL cell to filter. 
@@ -75,7 +79,13 @@ public class BulkLoadCellFilter { .setBulkloadSeqNum(bld.getBulkloadSeqNum()); newDesc.addAllStores(copiedStoresList); BulkLoadDescriptor newBulkLoadDescriptor = newDesc.build(); - return CellUtil.createCell(CellUtil.cloneRow(cell), WALEdit.METAFAMILY, WALEdit.BULK_LOAD, - cell.getTimestamp(), cell.getTypeByte(), newBulkLoadDescriptor.toByteArray()); + return cellBuilder.clear() + .setRow(CellUtil.cloneRow(cell)) + .setFamily(WALEdit.METAFAMILY) + .setQualifier(WALEdit.BULK_LOAD) + .setTimestamp(cell.getTimestamp()) + .setType(cell.getTypeByte()) + .setValue(newBulkLoadDescriptor.toByteArray()) + .build(); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index c40d4817088..b064afdbf9a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -347,13 +347,13 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS permRequest, tableName, makeFamilyMap(family.getKey(), qualifier)); } } - } else if (family.getValue() instanceof List) { // List<KeyValue> - List<KeyValue> kvList = (List<KeyValue>)family.getValue(); - for (KeyValue kv : kvList) { + } else if (family.getValue() instanceof List) { // List<Cell> + List<Cell> cellList = (List<Cell>)family.getValue(); + for (Cell cell : cellList) { if (!authManager.authorize(user, tableName, family.getKey(), - CellUtil.cloneQualifier(kv), permRequest)) { + CellUtil.cloneQualifier(cell), permRequest)) { return AuthResult.deny(request, "Failed qualifier check", user, permRequest, - tableName, makeFamilyMap(family.getKey(), CellUtil.cloneQualifier(kv))); + tableName, makeFamilyMap(family.getKey(), CellUtil.cloneQualifier(cell))); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java index 8bc80d5479f..153babf0a75 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java @@ -26,6 +26,8 @@ import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellBuilderFactory; +import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.ProcedureInfo; @@ -336,7 +338,7 @@ public class TestProtobufUtil { dbb.put(arr); ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, kv1.getLength(), kv2.getLength()); CellProtos.Cell cell = ProtobufUtil.toCell(offheapKV); - Cell newOffheapKV = ProtobufUtil.toCell(cell); + Cell newOffheapKV = ProtobufUtil.toCell(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell); assertTrue(CellComparator.COMPARATOR.compare(offheapKV, newOffheapKV) == 0); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java index ff9e4437c05..c5ad1cc26cd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/shaded/protobuf/TestProtobufUtil.java @@ -22,6 +22,8 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellBuilderFactory; +import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.ProcedureInfo; @@ -257,7 +259,7 @@ public class TestProtobufUtil { dbb.put(arr); ByteBufferKeyValue offheapKV = new 
ByteBufferKeyValue(dbb, kv1.getLength(), kv2.getLength()); CellProtos.Cell cell = ProtobufUtil.toCell(offheapKV); - Cell newOffheapKV = ProtobufUtil.toCell(cell); + Cell newOffheapKV = ProtobufUtil.toCell(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell); assertTrue(CellComparator.COMPARATOR.compare(offheapKV, newOffheapKV) == 0); }