HBASE-18519 Use builder pattern to create cell

Chia-Ping Tsai 2017-08-27 20:12:28 +08:00
parent e07ff0452f
commit 0142c09217
24 changed files with 1002 additions and 150 deletions

ProtobufUtil.java

@@ -31,9 +31,14 @@ import java.util.function.Function;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.ExtendedCellBuilder;
+import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HConstants;
@@ -491,6 +496,7 @@ public final class ProtobufUtil {
       throw new IllegalArgumentException("row cannot be null");
     }
     // The proto has the metadata and the data itself
+    ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
     for (ColumnValue column: proto.getColumnValueList()) {
       byte[] family = column.getFamily().toByteArray();
       for (QualifierValue qv: column.getQualifierValueList()) {
@@ -510,9 +516,14 @@ public final class ProtobufUtil {
           if (qv.hasTags()) {
             allTagsBytes = qv.getTags().toByteArray();
             if(qv.hasDeleteType()) {
-              byte[] qual = qv.hasQualifier() ? qv.getQualifier().toByteArray() : null;
-              put.add(new KeyValue(proto.getRow().toByteArray(), family, qual, ts,
-                  fromDeleteType(qv.getDeleteType()), null, allTagsBytes));
+              put.add(cellBuilder.clear()
+                  .setRow(proto.getRow().toByteArray())
+                  .setFamily(family)
+                  .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
+                  .setTimestamp(ts)
+                  .setType(fromDeleteType(qv.getDeleteType()).getCode())
+                  .setTags(allTagsBytes)
+                  .build());
             } else {
               List<Tag> tags = TagUtil.asList(allTagsBytes, 0, (short)allTagsBytes.length);
               Tag[] tagsArray = new Tag[tags.size()];
@@ -520,9 +531,13 @@ public final class ProtobufUtil {
             }
           } else {
             if(qv.hasDeleteType()) {
-              byte[] qual = qv.hasQualifier() ? qv.getQualifier().toByteArray() : null;
-              put.add(new KeyValue(proto.getRow().toByteArray(), family, qual, ts,
-                  fromDeleteType(qv.getDeleteType())));
+              put.add(cellBuilder.clear()
+                  .setRow(proto.getRow().toByteArray())
+                  .setFamily(family)
+                  .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
+                  .setTimestamp(ts)
+                  .setType(fromDeleteType(qv.getDeleteType()).getCode())
+                  .build());
             } else{
               put.addImmutable(family, qualifier, ts, value);
             }
@@ -1314,8 +1329,9 @@ public final class ProtobufUtil {
     }
     List<Cell> cells = new ArrayList<>(values.size());
+    CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
     for (CellProtos.Cell c : values) {
-      cells.add(toCell(c));
+      cells.add(toCell(builder, c));
     }
     return Result.create(cells, null, proto.getStale(), proto.getPartial());
   }
@@ -1356,8 +1372,9 @@ public final class ProtobufUtil {
     if (!values.isEmpty()){
       if (cells == null) cells = new ArrayList<>(values.size());
+      CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
       for (CellProtos.Cell c: values) {
-        cells.add(toCell(c));
+        cells.add(toCell(builder, c));
       }
     }
@@ -1616,15 +1633,15 @@ public final class ProtobufUtil {
     return kvbuilder.build();
   }

-  public static Cell toCell(final CellProtos.Cell cell) {
-    // Doing this is going to kill us if we do it for all data passed.
-    // St.Ack 20121205
-    return CellUtil.createCell(cell.getRow().toByteArray(),
-        cell.getFamily().toByteArray(),
-        cell.getQualifier().toByteArray(),
-        cell.getTimestamp(),
-        (byte)cell.getCellType().getNumber(),
-        cell.getValue().toByteArray());
+  public static Cell toCell(CellBuilder cellBuilder, final CellProtos.Cell cell) {
+    return cellBuilder.clear()
+        .setRow(cell.getRow().toByteArray())
+        .setFamily(cell.getFamily().toByteArray())
+        .setQualifier(cell.getQualifier().toByteArray())
+        .setTimestamp(cell.getTimestamp())
+        .setType((byte) cell.getCellType().getNumber())
+        .setValue(cell.getValue().toByteArray())
+        .build();
   }

   /**
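
Illustration (not part of the diff): the new toCell(CellBuilder, CellProtos.Cell) overload exists so a caller converting a batch of protobuf cells can allocate one builder and recycle it via clear(), instead of constructing a new KeyValue per cell. A minimal sketch using the classes imported above plus java.util.List/ArrayList; the helper name toCells is hypothetical:

  static List<Cell> toCells(List<CellProtos.Cell> protos) {
    List<Cell> cells = new ArrayList<>(protos.size());
    // One builder for the whole batch; toCell invokes clear() before each reuse.
    CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
    for (CellProtos.Cell p : protos) {
      cells.add(ProtobufUtil.toCell(builder, p));
    }
    return cells;
  }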

ProtobufUtil.java

@@ -41,11 +41,16 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ByteBufferCell;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.ClusterId;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.ExtendedCellBuilder;
+import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.HConstants;
@@ -419,7 +424,6 @@ public final class ProtobufUtil {
   /**
    * Get a list of TableDescriptor from GetTableDescriptorsResponse protobuf
-   *
    * @param proto the GetTableDescriptorsResponse
    * @return a list of TableDescriptor
    */
@@ -616,6 +620,7 @@ public final class ProtobufUtil {
       throw new IllegalArgumentException("row cannot be null");
     }
     // The proto has the metadata and the data itself
+    ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
     for (ColumnValue column: proto.getColumnValueList()) {
       byte[] family = column.getFamily().toByteArray();
       for (QualifierValue qv: column.getQualifierValueList()) {
@@ -635,9 +640,14 @@ public final class ProtobufUtil {
           if (qv.hasTags()) {
             allTagsBytes = qv.getTags().toByteArray();
             if(qv.hasDeleteType()) {
-              byte[] qual = qv.hasQualifier() ? qv.getQualifier().toByteArray() : null;
-              put.add(new KeyValue(proto.getRow().toByteArray(), family, qual, ts,
-                  fromDeleteType(qv.getDeleteType()), null, allTagsBytes));
+              put.add(cellBuilder.clear()
+                  .setRow(proto.getRow().toByteArray())
+                  .setFamily(family)
+                  .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
+                  .setTimestamp(ts)
+                  .setType(fromDeleteType(qv.getDeleteType()).getCode())
+                  .setTags(allTagsBytes)
+                  .build());
             } else {
               List<Tag> tags = TagUtil.asList(allTagsBytes, 0, (short)allTagsBytes.length);
               Tag[] tagsArray = new Tag[tags.size()];
@@ -645,9 +655,13 @@ public final class ProtobufUtil {
             }
           } else {
             if(qv.hasDeleteType()) {
-              byte[] qual = qv.hasQualifier() ? qv.getQualifier().toByteArray() : null;
-              put.add(new KeyValue(proto.getRow().toByteArray(), family, qual, ts,
-                  fromDeleteType(qv.getDeleteType())));
+              put.add(cellBuilder.clear()
+                  .setRow(proto.getRow().toByteArray())
+                  .setFamily(family)
+                  .setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
+                  .setTimestamp(ts)
+                  .setType(fromDeleteType(qv.getDeleteType()).getCode())
+                  .build());
             } else{
               put.addImmutable(family, qualifier, ts, value);
             }
@@ -787,8 +801,15 @@ public final class ProtobufUtil {
           if (qv.hasTags()) {
             tags = qv.getTags().toByteArray();
           }
-          consumer.accept(mutation, CellUtil.createCell(mutation.getRow(), family, qualifier, qv.getTimestamp(),
-            KeyValue.Type.Put, value, tags));
+          consumer.accept(mutation, ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+              .setRow(mutation.getRow())
+              .setFamily(family)
+              .setQualifier(qualifier)
+              .setTimestamp(qv.getTimestamp())
+              .setType(KeyValue.Type.Put.getCode())
+              .setValue(value)
+              .setTags(tags)
+              .build());
         }
       }
     }
@@ -1461,8 +1482,9 @@ public final class ProtobufUtil {
     }
     List<Cell> cells = new ArrayList<>(values.size());
+    CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
     for (CellProtos.Cell c : values) {
-      cells.add(toCell(c));
+      cells.add(toCell(builder, c));
     }
     return Result.create(cells, null, proto.getStale(), proto.getPartial());
   }
@@ -1503,8 +1525,9 @@ public final class ProtobufUtil {
     if (!values.isEmpty()){
       if (cells == null) cells = new ArrayList<>(values.size());
+      CellBuilder builder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
       for (CellProtos.Cell c: values) {
-        cells.add(toCell(c));
+        cells.add(toCell(builder, c));
       }
     }
@@ -2009,15 +2032,15 @@ public final class ProtobufUtil {
     return UnsafeByteOperations.unsafeWrap(dup);
   }

-  public static Cell toCell(final CellProtos.Cell cell) {
-    // Doing this is going to kill us if we do it for all data passed.
-    // St.Ack 20121205
-    return CellUtil.createCell(cell.getRow().toByteArray(),
-        cell.getFamily().toByteArray(),
-        cell.getQualifier().toByteArray(),
-        cell.getTimestamp(),
-        (byte)cell.getCellType().getNumber(),
-        cell.getValue().toByteArray());
+  public static Cell toCell(CellBuilder cellBuilder, final CellProtos.Cell cell) {
+    return cellBuilder.clear()
+        .setRow(cell.getRow().toByteArray())
+        .setFamily(cell.getFamily().toByteArray())
+        .setQualifier(cell.getQualifier().toByteArray())
+        .setTimestamp(cell.getTimestamp())
+        .setType((byte) cell.getCellType().getNumber())
+        .setValue(cell.getValue().toByteArray())
+        .build();
   }

 public static HBaseProtos.NamespaceDescriptor toProtoNamespaceDescriptor(NamespaceDescriptor ns) {

CellBuilder.java (new file)

@@ -0,0 +1,52 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
/**
* Use {@link CellBuilderFactory} to get a CellBuilder instance.
*/
@InterfaceAudience.Public
public interface CellBuilder {
CellBuilder setRow(final byte[] row);
CellBuilder setRow(final byte[] row, final int rOffset, final int rLength);
CellBuilder setFamily(final byte[] family);
CellBuilder setFamily(final byte[] family, final int fOffset, final int fLength);
CellBuilder setQualifier(final byte[] qualifier);
CellBuilder setQualifier(final byte[] qualifier, final int qOffset, final int qLength);
CellBuilder setTimestamp(final long timestamp);
CellBuilder setType(final byte type);
CellBuilder setValue(final byte[] value);
CellBuilder setValue(final byte[] value, final int vOffset, final int vLength);
Cell build();
/**
* Remove all internal elements from the builder.
* @return this
*/
CellBuilder clear();
}
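
Illustration (not part of the diff): a minimal usage sketch of the new interface. setType(...) is the only mandatory call; the other fields have defaults (see ExtendedCellBuilderImpl below):

  CellBuilder builder = CellBuilderFactory.create(CellBuilderType.DEEP_COPY);
  Cell first = builder
      .setRow(Bytes.toBytes("row-1"))
      .setFamily(Bytes.toBytes("cf"))
      .setQualifier(Bytes.toBytes("q"))
      .setTimestamp(System.currentTimeMillis())
      .setType(KeyValue.Type.Put.getCode())
      .setValue(Bytes.toBytes("value"))
      .build();
  // The same builder can be recycled for the next cell.
  Cell second = builder.clear()
      .setRow(Bytes.toBytes("row-2"))
      .setType(KeyValue.Type.Put.getCode())
      .build();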

CellBuilderFactory.java (new file)

@@ -0,0 +1,52 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
/**
* Create a CellBuilder instance. Currently, we have two kinds of cell builders.
* {@link CellBuilderType#DEEP_COPY} All byte arrays passed into the builder are copied to build a new Cell.
* The cell impl is {@link org.apache.hadoop.hbase.KeyValue}
* {@link CellBuilderType#SHALLOW_COPY} Only the references to the passed byte arrays are kept to build a new Cell.
* The cell impl is {@link org.apache.hadoop.hbase.IndividualBytesFieldCell}
* NOTE: The cell impl may change in the future. User applications SHOULD NOT depend on any concrete cell impl.
*/
@InterfaceAudience.Public
public final class CellBuilderFactory {
/**
* Create a CellBuilder instance.
* @param type indicates which memory copy is used when building a cell.
* @return a new CellBuilder
*/
public static CellBuilder create(CellBuilderType type) {
switch (type) {
case SHALLOW_COPY:
return new IndividualBytesFieldCellBuilder();
case DEEP_COPY:
return new KeyValueBuilder();
default:
throw new UnsupportedOperationException("The type:" + type + " is unsupported");
}
}
private CellBuilderFactory(){
}
}

CellBuilderType.java (new file)

@@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
/**
* Used by {@link CellBuilderFactory} and {@link ExtendedCellBuilderFactory}.
* Indicates which memory copy is used when building a cell.
*/
@InterfaceAudience.Public
public enum CellBuilderType {
/**
* The cell builder copies all passed byte arrays when building a cell.
*/
DEEP_COPY,
/**
* DON'T modify the byte arrays passed to the cell builder,
* because all fields in the new cell reference the input arguments.
*/
SHALLOW_COPY
}
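
Illustration (not part of the diff): the practical difference between the two types, mirroring the TestCellBuilder cases further down. Mutating a source array after build() shows through a SHALLOW_COPY cell but not through a DEEP_COPY cell:

  byte[] row = Bytes.toBytes("r");
  Cell shallow = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
      .setRow(row).setType(KeyValue.Type.Put.getCode()).build();
  Cell deep = CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
      .setRow(row).setType(KeyValue.Type.Put.getCode()).build();
  row[0] = 'x';
  // shallow.getRowArray()[shallow.getRowOffset()] is now 'x',
  // while deep.getRowArray()[deep.getRowOffset()] is still 'r'.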

CellUtil.java

@@ -316,73 +316,104 @@ public final class CellUtil {
     return buffer;
   }

+  /**
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *             Use {@link CellBuilder} instead
+   */
+  @Deprecated
   public static Cell createCell(final byte [] row, final byte [] family, final byte [] qualifier,
       final long timestamp, final byte type, final byte [] value) {
-    // I need a Cell Factory here. Using KeyValue for now. TODO.
-    // TODO: Make a new Cell implementation that just carries these
-    // byte arrays.
-    // TODO: Call factory to create Cell
-    return new KeyValue(row, family, qualifier, timestamp, KeyValue.Type.codeToType(type), value);
+    return CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
+        .setRow(row)
+        .setFamily(family)
+        .setQualifier(qualifier)
+        .setTimestamp(timestamp)
+        .setType(type)
+        .setValue(value)
+        .build();
   }

+  /**
+   * Creates a cell with deep copy of all passed bytes.
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *             Use {@link CellBuilder} instead
+   */
+  @Deprecated
   public static Cell createCell(final byte [] rowArray, final int rowOffset, final int rowLength,
       final byte [] familyArray, final int familyOffset, final int familyLength,
       final byte [] qualifierArray, final int qualifierOffset, final int qualifierLength) {
     // See createCell(final byte [] row, final byte [] value) for why we default Maximum type.
-    return new KeyValue(rowArray, rowOffset, rowLength,
-        familyArray, familyOffset, familyLength,
-        qualifierArray, qualifierOffset, qualifierLength,
-        HConstants.LATEST_TIMESTAMP,
-        KeyValue.Type.Maximum,
-        HConstants.EMPTY_BYTE_ARRAY, 0, HConstants.EMPTY_BYTE_ARRAY.length);
+    return CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
+        .setRow(rowArray, rowOffset, rowLength)
+        .setFamily(familyArray, familyOffset, familyLength)
+        .setQualifier(qualifierArray, qualifierOffset, qualifierLength)
+        .setTimestamp(HConstants.LATEST_TIMESTAMP)
+        .setType(KeyValue.Type.Maximum.getCode())
+        .setValue(HConstants.EMPTY_BYTE_ARRAY, 0, HConstants.EMPTY_BYTE_ARRAY.length)
+        .build();
   }

   /**
    * Marked as audience Private as of 1.2.0.
    * Creating a Cell with a memstoreTS/mvcc is an internal implementation detail not for
    * public use.
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *             Use {@link ExtendedCellBuilder} instead
    */
   @InterfaceAudience.Private
+  @Deprecated
   public static Cell createCell(final byte[] row, final byte[] family, final byte[] qualifier,
       final long timestamp, final byte type, final byte[] value, final long memstoreTS) {
-    KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp,
-        KeyValue.Type.codeToType(type), value);
-    keyValue.setSequenceId(memstoreTS);
-    return keyValue;
+    return createCell(row, family, qualifier, timestamp, type, value, null, memstoreTS);
   }

   /**
    * Marked as audience Private as of 1.2.0.
    * Creating a Cell with tags and a memstoreTS/mvcc is an internal implementation detail not for
    * public use.
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *             Use {@link ExtendedCellBuilder} instead
    */
   @InterfaceAudience.Private
+  @Deprecated
   public static Cell createCell(final byte[] row, final byte[] family, final byte[] qualifier,
       final long timestamp, final byte type, final byte[] value, byte[] tags,
       final long memstoreTS) {
-    KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp,
-        KeyValue.Type.codeToType(type), value, tags);
-    keyValue.setSequenceId(memstoreTS);
-    return keyValue;
+    return ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
+        .setRow(row)
+        .setFamily(family)
+        .setQualifier(qualifier)
+        .setTimestamp(timestamp)
+        .setType(type)
+        .setValue(value)
+        .setTags(tags)
+        .setSequenceId(memstoreTS)
+        .build();
   }

   /**
    * Marked as audience Private as of 1.2.0.
    * Creating a Cell with tags is an internal implementation detail not for
    * public use.
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *             Use {@link ExtendedCellBuilder} instead
    */
   @InterfaceAudience.Private
+  @Deprecated
   public static Cell createCell(final byte[] row, final byte[] family, final byte[] qualifier,
       final long timestamp, Type type, final byte[] value, byte[] tags) {
-    KeyValue keyValue = new KeyValue(row, family, qualifier, timestamp, type, value, tags);
-    return keyValue;
+    return createCell(row, family, qualifier, timestamp, type.getCode(), value,
+        tags, 0);
   }

   /**
    * Create a Cell with specific row. Other fields defaulted.
    * @param row
    * @return Cell with passed row but all other fields are arbitrary
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *             Use {@link CellBuilder} instead
    */
+  @Deprecated
   public static Cell createCell(final byte [] row) {
     return createCell(row, HConstants.EMPTY_BYTE_ARRAY);
   }
@@ -392,7 +423,10 @@ public final class CellUtil {
    * @param row
    * @param value
    * @return Cell with passed row and value but all other fields are arbitrary
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *             Use {@link CellBuilder} instead
    */
+  @Deprecated
   public static Cell createCell(final byte [] row, final byte [] value) {
     // An empty family + empty qualifier + Type.Minimum is used as flag to indicate last on row.
     // See the CellComparator and KeyValue comparator. Search for compareWithoutRow.
@@ -408,7 +442,10 @@ public final class CellUtil {
    * @param family
    * @param qualifier
    * @return Cell with passed row but all other fields are arbitrary
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *             Use {@link CellBuilder} instead
    */
+  @Deprecated
   public static Cell createCell(final byte [] row, final byte [] family, final byte [] qualifier) {
     // See above in createCell(final byte [] row, final byte [] value) why we set type to Maximum.
     return createCell(row, family, qualifier,
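
Illustration (not part of the diff): the before/after shape for a caller migrating off the deprecated helpers; row, family, qualifier, ts and value are hypothetical locals:

  // Deprecated as of this commit:
  Cell legacy = CellUtil.createCell(row, family, qualifier, ts,
      KeyValue.Type.Put.getCode(), value);
  // Replacement:
  Cell cell = CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
      .setRow(row).setFamily(family).setQualifier(qualifier)
      .setTimestamp(ts).setType(KeyValue.Type.Put.getCode())
      .setValue(value).build();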

ExtendedCellBuilder.java (new file)

@@ -0,0 +1,67 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
/**
* For internal use only.
* {@link Tag} and memstoreTS/mvcc are internal implementation details
* that should not be exposed publicly.
* Use {@link ExtendedCellBuilderFactory} to get an ExtendedCellBuilder instance.
* TODO: ditto for ByteBufferCell?
*/
@InterfaceAudience.Private
public interface ExtendedCellBuilder extends CellBuilder {
@Override
ExtendedCellBuilder setRow(final byte[] row);
@Override
ExtendedCellBuilder setRow(final byte[] row, final int rOffset, final int rLength);
@Override
ExtendedCellBuilder setFamily(final byte[] family);
@Override
ExtendedCellBuilder setFamily(final byte[] family, final int fOffset, final int fLength);
@Override
ExtendedCellBuilder setQualifier(final byte[] qualifier);
@Override
ExtendedCellBuilder setQualifier(final byte[] qualifier, final int qOffset, final int qLength);
@Override
ExtendedCellBuilder setTimestamp(final long timestamp);
@Override
ExtendedCellBuilder setType(final byte type);
@Override
ExtendedCellBuilder setValue(final byte[] value);
@Override
ExtendedCellBuilder setValue(final byte[] value, final int vOffset, final int vLength);
@Override
ExtendedCell build();
@Override
ExtendedCellBuilder clear();
ExtendedCellBuilder setTags(final byte[] tags);
ExtendedCellBuilder setTags(final byte[] tags, int tagsOffset, int tagsLength);
ExtendedCellBuilder setSequenceId(final long seqId);
}
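
Illustration (not part of the diff): how internal code can attach the server-only fields (raw tags and memstoreTS/mvcc) that the public CellBuilder deliberately hides; the locals are hypothetical:

  ExtendedCell cell = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
      .setRow(row).setFamily(family).setQualifier(qualifier)
      .setTimestamp(ts)
      .setType(KeyValue.Type.Put.getCode())
      .setValue(value)
      .setTags(tags)          // serialized tag bytes, not Tag objects
      .setSequenceId(mvcc)    // memstoreTS/mvcc
      .build();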

ExtendedCellBuilderFactory.java (new file)

@@ -0,0 +1,40 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@InterfaceAudience.Private
public final class ExtendedCellBuilderFactory {
public static ExtendedCellBuilder create(CellBuilderType type) {
switch (type) {
case SHALLOW_COPY:
return new IndividualBytesFieldCellBuilder();
case DEEP_COPY:
return new KeyValueBuilder();
default:
throw new UnsupportedOperationException("The type:" + type + " is unsupported");
}
}
private ExtendedCellBuilderFactory(){
}
}

ExtendedCellBuilderImpl.java (new file)

@@ -0,0 +1,163 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.ArrayUtils;
@InterfaceAudience.Private
public abstract class ExtendedCellBuilderImpl implements ExtendedCellBuilder {
protected byte[] row = null;
protected int rOffset = 0;
protected int rLength = 0;
protected byte[] family = null;
protected int fOffset = 0;
protected int fLength = 0;
protected byte[] qualifier = null;
protected int qOffset = 0;
protected int qLength = 0;
protected long timestamp = HConstants.LATEST_TIMESTAMP;
protected Byte type = null;
protected byte[] value = null;
protected int vOffset = 0;
protected int vLength = 0;
protected long seqId = 0;
protected byte[] tags = null;
protected int tagsOffset = 0;
protected int tagsLength = 0;
@Override
public ExtendedCellBuilder setRow(final byte[] row) {
return setRow(row, 0, ArrayUtils.length(row));
}
@Override
public ExtendedCellBuilder setRow(final byte[] row, int rOffset, int rLength) {
this.row = row;
this.rOffset = rOffset;
this.rLength = rLength;
return this;
}
@Override
public ExtendedCellBuilder setFamily(final byte[] family) {
return setFamily(family, 0, ArrayUtils.length(family));
}
@Override
public ExtendedCellBuilder setFamily(final byte[] family, int fOffset, int fLength) {
this.family = family;
this.fOffset = fOffset;
this.fLength = fLength;
return this;
}
@Override
public ExtendedCellBuilder setQualifier(final byte[] qualifier) {
return setQualifier(qualifier, 0, ArrayUtils.length(qualifier));
}
@Override
public ExtendedCellBuilder setQualifier(final byte[] qualifier, int qOffset, int qLength) {
this.qualifier = qualifier;
this.qOffset = qOffset;
this.qLength = qLength;
return this;
}
@Override
public ExtendedCellBuilder setTimestamp(final long timestamp) {
this.timestamp = timestamp;
return this;
}
@Override
public ExtendedCellBuilder setType(final byte type) {
this.type = type;
return this;
}
@Override
public ExtendedCellBuilder setValue(final byte[] value) {
return setValue(value, 0, ArrayUtils.length(value));
}
@Override
public ExtendedCellBuilder setValue(final byte[] value, int vOffset, int vLength) {
this.value = value;
this.vOffset = vOffset;
this.vLength = vLength;
return this;
}
@Override
public ExtendedCellBuilder setTags(final byte[] tags) {
return setTags(tags, 0, ArrayUtils.length(tags));
}
@Override
public ExtendedCellBuilder setTags(final byte[] tags, int tagsOffset, int tagsLength) {
this.tags = tags;
this.tagsOffset = tagsOffset;
this.tagsLength = tagsLength;
return this;
}
@Override
public ExtendedCellBuilder setSequenceId(final long seqId) {
this.seqId = seqId;
return this;
}
private void checkBeforeBuild() {
if (type == null) {
throw new IllegalArgumentException("The type can't be NULL");
}
}
protected abstract ExtendedCell innerBuild();
@Override
public ExtendedCell build() {
checkBeforeBuild();
return innerBuild();
}
@Override
public ExtendedCellBuilder clear() {
row = null;
rOffset = 0;
rLength = 0;
family = null;
fOffset = 0;
fLength = 0;
qualifier = null;
qOffset = 0;
qLength = 0;
timestamp = HConstants.LATEST_TIMESTAMP;
type = null;
value = null;
vOffset = 0;
vLength = 0;
seqId = 0;
tags = null;
tagsOffset = 0;
tagsLength = 0;
return this;
}
}
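
Note that checkBeforeBuild() makes the type byte the only mandatory field; everything else defaults (timestamp to HConstants.LATEST_TIMESTAMP, arrays to null with zero offset/length). Illustration (not part of the diff):

  // Throws IllegalArgumentException("The type can't be NULL"):
  CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
      .setRow(Bytes.toBytes("r"))
      .build();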

IndividualBytesFieldCell.java

@@ -18,16 +18,16 @@
 package org.apache.hadoop.hbase;

-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ClassSize;
-import org.apache.hadoop.hbase.util.ByteBufferUtils;
-
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;

+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.util.ArrayUtils;
+import org.apache.hadoop.hbase.util.ByteBufferUtils;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassSize;
+
 @InterfaceAudience.Private
 public class IndividualBytesFieldCell implements ExtendedCell {
@@ -38,15 +38,25 @@ public class IndividualBytesFieldCell implements ExtendedCell {
       + 5 * ClassSize.REFERENCE); // references to all byte arrays: row, family, qualifier, value, tags

   // The following fields are backed by individual byte arrays
-  private byte[] row;
-  private byte[] family;
-  private byte[] qualifier;
-  private byte[] value;
-  private byte[] tags;  // A byte array, rather than an array of org.apache.hadoop.hbase.Tag
+  private final byte[] row;
+  private final int rOffset;
+  private final int rLength;
+  private final byte[] family;
+  private final int fOffset;
+  private final int fLength;
+  private final byte[] qualifier;
+  private final int qOffset;
+  private final int qLength;
+  private final byte[] value;
+  private final int vOffset;
+  private final int vLength;
+  private final byte[] tags;  // A byte array, rather than an array of org.apache.hadoop.hbase.Tag
+  private final int tagsOffset;
+  private final int tagsLength;

   // Other fields
   private long timestamp;
-  private byte type;  // A byte, rather than org.apache.hadoop.hbase.KeyValue.Type
+  private final byte type;  // A byte, rather than org.apache.hadoop.hbase.KeyValue.Type
   private long seqId;

   public IndividualBytesFieldCell(byte[] row, byte[] family, byte[] qualifier,
@@ -56,12 +66,26 @@ public class IndividualBytesFieldCell implements ExtendedCell {
   public IndividualBytesFieldCell(byte[] row, byte[] family, byte[] qualifier,
       long timestamp, KeyValue.Type type, long seqId, byte[] value, byte[] tags) {
+    this(row, 0, ArrayUtils.length(row),
+        family, 0, ArrayUtils.length(family),
+        qualifier, 0, ArrayUtils.length(qualifier),
+        timestamp, type, seqId,
+        value, 0, ArrayUtils.length(value),
+        tags, 0, ArrayUtils.length(tags));
+  }
+
+  public IndividualBytesFieldCell(byte[] row, int rOffset, int rLength,
+      byte[] family, int fOffset, int fLength,
+      byte[] qualifier, int qOffset, int qLength,
+      long timestamp, KeyValue.Type type, long seqId,
+      byte[] value, int vOffset, int vLength,
+      byte[] tags, int tagsOffset, int tagsLength) {
     // Check row, family, qualifier and value
-    KeyValue.checkParameters(row, (row == null) ? 0 : row.length,  // row and row length
-        family, (family == null) ? 0 : family.length,              // family and family length
-        (qualifier == null) ? 0 : qualifier.length,                // qualifier length
-        (value == null) ? 0 : value.length);                       // value length
+    KeyValue.checkParameters(row, rLength,  // row and row length
+        family, fLength,                    // family and family length
+        qLength,                            // qualifier length
+        vLength);                           // value length

     // Check timestamp
     if (timestamp < 0) {
@@ -69,14 +93,28 @@ public class IndividualBytesFieldCell implements ExtendedCell {
     }

     // Check tags
-    TagUtil.checkForTagsLength((tags == null) ? 0 : tags.length);
+    TagUtil.checkForTagsLength(tagsLength);
+    checkArrayBounds(row, rOffset, rLength);
+    checkArrayBounds(family, fOffset, fLength);
+    checkArrayBounds(qualifier, qOffset, qLength);
+    checkArrayBounds(value, vOffset, vLength);
+    checkArrayBounds(tags, tagsOffset, tagsLength);

     // No local copy is made, but reference to the input directly
     this.row = row;
+    this.rOffset = rOffset;
+    this.rLength = rLength;
     this.family = family;
+    this.fOffset = fOffset;
+    this.fLength = fLength;
     this.qualifier = qualifier;
+    this.qOffset = qOffset;
+    this.qLength = qLength;
     this.value = value;
+    this.vOffset = vOffset;
+    this.vLength = vLength;
     this.tags = tags;
+    this.tagsOffset = tagsOffset;
+    this.tagsLength = tagsLength;

     // Set others
     this.timestamp = timestamp;
@@ -84,6 +122,19 @@ public class IndividualBytesFieldCell implements ExtendedCell {
     this.seqId = seqId;
   }

+  private void checkArrayBounds(byte[] bytes, int offset, int length) {
+    if (offset < 0 || length < 0) {
+      throw new IllegalArgumentException("Negative number! offset=" + offset + "and length=" + length);
+    }
+    if (bytes == null && (offset != 0 || length != 0)) {
+      throw new IllegalArgumentException("Null bytes array but offset=" + offset + "and length=" + length);
+    }
+    if (bytes != null && bytes.length < offset + length) {
+      throw new IllegalArgumentException("Out of bounds! bytes.length=" + bytes.length
+          + ", offset=" + offset + ", length=" + length);
+    }
+  }
+
   @Override
   public int write(OutputStream out, boolean withTags) throws IOException {
     // Key length and then value length
@@ -99,8 +150,8 @@ public class IndividualBytesFieldCell implements ExtendedCell {
     // Tags length and tags byte array
     if (withTags && getTagsLength() > 0) {
       // Tags length
-      out.write((byte)(0xff & (tags.length >> 8)));
-      out.write((byte)(0xff & tags.length));
+      out.write((byte)(0xff & (getTagsLength() >> 8)));
+      out.write((byte)(0xff & getTagsLength()));

       // Tags byte array
       out.write(tags);
@@ -148,14 +199,14 @@ public class IndividualBytesFieldCell implements ExtendedCell {
   @Override
   public int getRowOffset() {
-    return 0;
+    return rOffset;
   }

   @Override
   public short getRowLength() {
-    // If row is null or row.length is invalid, the constructor will reject it, by {@link KeyValue#checkParameters()},
-    // so it is safe to call row.length and make the type conversion.
-    return (short)(row.length);
+    // If row is null or rLength is invalid, the constructor will reject it, by {@link KeyValue#checkParameters()},
+    // so it is safe to call rLength and make the type conversion.
+    return (short)(rLength);
   }

   // 2) Family
@@ -167,15 +218,14 @@ public class IndividualBytesFieldCell implements ExtendedCell {
   @Override
   public int getFamilyOffset() {
-    return 0;
+    return fOffset;
   }

   @Override
   public byte getFamilyLength() {
-    // If family.length is invalid, the constructor will reject it, by {@link KeyValue#checkParameters()},
-    // so it is safe to make the type conversion.
-    // But need to consider the condition when family is null.
-    return (family == null) ? 0 : (byte)(family.length);
+    // If fLength is invalid, the constructor will reject it, by {@link KeyValue#checkParameters()},
+    // so it is safe to make the type conversion.
+    return (byte)(fLength);
   }

   // 3) Qualifier
@@ -187,13 +237,12 @@ public class IndividualBytesFieldCell implements ExtendedCell {
   @Override
   public int getQualifierOffset() {
-    return 0;
+    return qOffset;
   }

   @Override
   public int getQualifierLength() {
-    // Qualifier could be null
-    return (qualifier == null) ? 0 : qualifier.length;
+    return qLength;
   }

   // 4) Timestamp
@@ -223,13 +272,12 @@ public class IndividualBytesFieldCell implements ExtendedCell {
   @Override
   public int getValueOffset() {
-    return 0;
+    return vOffset;
   }

   @Override
   public int getValueLength() {
-    // Value could be null
-    return (value == null) ? 0 : value.length;
+    return vLength;
   }

   // 8) Tags
@@ -241,13 +289,12 @@ public class IndividualBytesFieldCell implements ExtendedCell {
   @Override
   public int getTagsOffset() {
-    return 0;
+    return tagsOffset;
   }

   @Override
   public int getTagsLength() {
-    // Tags could be null
-    return (tags == null) ? 0 : tags.length;
+    return tagsLength;
   }

   /**
@@ -298,4 +345,9 @@ public class IndividualBytesFieldCell implements ExtendedCell {
   public void setTimestamp(byte[] ts, int tsOffset) {
     setTimestamp(Bytes.toLong(ts, tsOffset));
   }
+
+  @Override
+  public String toString() {
+    return CellUtil.toString(this, true);
+  }
 }
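
Illustration (not part of the diff): the new offset/length constructor lets several fields share slices of one backing buffer without copying; the layout below is hypothetical:

  byte[] buf = Bytes.toBytes("rowfamqualvalue");  // one shared buffer
  Cell c = new IndividualBytesFieldCell(
      buf, 0, 3,                                  // row       = "row"
      buf, 3, 3,                                  // family    = "fam"
      buf, 6, 4,                                  // qualifier = "qual"
      HConstants.LATEST_TIMESTAMP, KeyValue.Type.Put, 0L,
      buf, 10, 5,                                 // value     = "value"
      null, 0, 0);                                // no tags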

IndividualBytesFieldCellBuilder.java (new file)

@@ -0,0 +1,33 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@InterfaceAudience.Private
class IndividualBytesFieldCellBuilder extends ExtendedCellBuilderImpl {
@Override
public ExtendedCell innerBuild() {
return new IndividualBytesFieldCell(row, rOffset, rLength,
family, fOffset, fLength,
qualifier, qOffset, qLength,
timestamp, KeyValue.Type.codeToType(type), seqId,
value, vOffset, vLength,
tags, tagsOffset, tagsLength);
}
}

KeyValueBuilder.java (new file)

@@ -0,0 +1,35 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@InterfaceAudience.Private
class KeyValueBuilder extends ExtendedCellBuilderImpl {
@Override
protected ExtendedCell innerBuild() {
KeyValue kv = new KeyValue(row, rOffset, rLength,
family, fOffset, fLength,
qualifier, qOffset, qLength,
timestamp, KeyValue.Type.codeToType(type),
value, vOffset, vLength,
tags, tagsOffset, tagsLength);
kv.setSequenceId(seqId);
return kv;
}
}
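
The two builder impls are what realize the copy semantics: IndividualBytesFieldCellBuilder stores the passed references as-is (hence SHALLOW_COPY), while KeyValueBuilder goes through the KeyValue constructor, which serializes every field into one freshly allocated byte array (hence DEEP_COPY). Illustration (not part of the diff):

  Cell deep = CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
      .setRow(Bytes.toBytes("r"))
      .setType(KeyValue.Type.Put.getCode())
      .setValue(Bytes.toBytes("v"))
      .build();
  // In the KeyValue impl, row and value live in the same flat buffer,
  // so deep.getRowArray() == deep.getValueArray().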

CellCodec.java

@@ -23,7 +23,9 @@ import java.io.OutputStream;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.CellBuilderType;
+import org.apache.hadoop.hbase.ExtendedCellBuilder;
+import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.ByteBuffInputStream;
@@ -77,6 +79,7 @@ public class CellCodec implements Codec {
   }

   static class CellDecoder extends BaseDecoder {
+    private final ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
     public CellDecoder(final InputStream in) {
       super(in);
     }
@@ -95,7 +98,15 @@ public class CellCodec implements Codec {
       byte[] memstoreTSArray = new byte[Bytes.SIZEOF_LONG];
       IOUtils.readFully(this.in, memstoreTSArray);
       long memstoreTS = Bytes.toLong(memstoreTSArray);
-      return CellUtil.createCell(row, family, qualifier, timestamp, type, value, memstoreTS);
+      return cellBuilder.clear()
+          .setRow(row)
+          .setFamily(family)
+          .setQualifier(qualifier)
+          .setTimestamp(timestamp)
+          .setType(type)
+          .setValue(value)
+          .setSequenceId(memstoreTS)
+          .build();
     }

     /**
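
Reusing one SHALLOW_COPY builder per decoder is safe here because every parseCell() call reads fresh byte arrays off the stream, so no two decoded cells ever alias the same bytes. Illustration (not part of the diff) of the decode loop a caller drives; 'in' and process are hypothetical:

  Codec.Decoder decoder = new CellCodec().getDecoder(in);
  while (decoder.advance()) {
    Cell cell = decoder.current();  // built by the shared cellBuilder
    process(cell);
  }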

CellCodecWithTags.java

@@ -23,7 +23,9 @@ import java.io.OutputStream;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.CellBuilderType;
+import org.apache.hadoop.hbase.ExtendedCellBuilder;
+import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.ByteBuffInputStream;
@@ -78,6 +80,7 @@ public class CellCodecWithTags implements Codec {
   }

   static class CellDecoder extends BaseDecoder {
+    private final ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
     public CellDecoder(final InputStream in) {
       super(in);
     }
@@ -96,7 +99,16 @@ public class CellCodecWithTags implements Codec {
       byte[] memstoreTSArray = new byte[Bytes.SIZEOF_LONG];
       IOUtils.readFully(this.in, memstoreTSArray);
       long memstoreTS = Bytes.toLong(memstoreTSArray);
-      return CellUtil.createCell(row, family, qualifier, timestamp, type, value, tags, memstoreTS);
+      return cellBuilder.clear()
+          .setRow(row)
+          .setFamily(family)
+          .setQualifier(qualifier)
+          .setTimestamp(timestamp)
+          .setType(type)
+          .setValue(value)
+          .setSequenceId(memstoreTS)
+          .setTags(tags)
+          .build();
     }

     /**

TestCellBuilder.java (new file)

@@ -0,0 +1,139 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;
@Category({MiscTests.class, SmallTests.class})
public class TestCellBuilder {
private static final byte OLD_DATA = 87;
private static final byte NEW_DATA = 100;
@Test
public void testCellBuilderWithDeepCopy() {
byte[] row = new byte[]{OLD_DATA};
byte[] family = new byte[]{OLD_DATA};
byte[] qualifier = new byte[]{OLD_DATA};
byte[] value = new byte[]{OLD_DATA};
Cell cell = CellBuilderFactory.create(CellBuilderType.DEEP_COPY)
.setRow(row)
.setFamily(family)
.setQualifier(qualifier)
.setType(KeyValue.Type.Put.getCode())
.setValue(value)
.build();
row[0] = NEW_DATA;
family[0] = NEW_DATA;
qualifier[0] = NEW_DATA;
value[0] = NEW_DATA;
assertEquals(OLD_DATA, cell.getRowArray()[cell.getRowOffset()]);
assertEquals(OLD_DATA, cell.getFamilyArray()[cell.getFamilyOffset()]);
assertEquals(OLD_DATA, cell.getQualifierArray()[cell.getQualifierOffset()]);
assertEquals(OLD_DATA, cell.getValueArray()[cell.getValueOffset()]);
}
@Test
public void testCellBuilderWithShallowCopy() {
byte[] row = new byte[]{OLD_DATA};
byte[] family = new byte[]{OLD_DATA};
byte[] qualifier = new byte[]{OLD_DATA};
byte[] value = new byte[]{OLD_DATA};
Cell cell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
.setRow(row)
.setFamily(family)
.setQualifier(qualifier)
.setType(KeyValue.Type.Put.getCode())
.setValue(value)
.build();
row[0] = NEW_DATA;
family[0] = NEW_DATA;
qualifier[0] = NEW_DATA;
value[0] = NEW_DATA;
assertEquals(NEW_DATA, cell.getRowArray()[cell.getRowOffset()]);
assertEquals(NEW_DATA, cell.getFamilyArray()[cell.getFamilyOffset()]);
assertEquals(NEW_DATA, cell.getQualifierArray()[cell.getQualifierOffset()]);
assertEquals(NEW_DATA, cell.getValueArray()[cell.getValueOffset()]);
}
@Test
public void testExtendedCellBuilderWithShallowCopy() {
byte[] row = new byte[]{OLD_DATA};
byte[] family = new byte[]{OLD_DATA};
byte[] qualifier = new byte[]{OLD_DATA};
byte[] value = new byte[]{OLD_DATA};
byte[] tags = new byte[]{OLD_DATA};
long seqId = 999;
Cell cell = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
.setRow(row)
.setFamily(family)
.setQualifier(qualifier)
.setType(KeyValue.Type.Put.getCode())
.setValue(value)
.setTags(tags)
.setSequenceId(seqId)
.build();
row[0] = NEW_DATA;
family[0] = NEW_DATA;
qualifier[0] = NEW_DATA;
value[0] = NEW_DATA;
tags[0] = NEW_DATA;
assertEquals(NEW_DATA, cell.getRowArray()[cell.getRowOffset()]);
assertEquals(NEW_DATA, cell.getFamilyArray()[cell.getFamilyOffset()]);
assertEquals(NEW_DATA, cell.getQualifierArray()[cell.getQualifierOffset()]);
assertEquals(NEW_DATA, cell.getValueArray()[cell.getValueOffset()]);
assertEquals(NEW_DATA, cell.getTagsArray()[cell.getTagsOffset()]);
assertEquals(seqId, cell.getSequenceId());
}
@Test
public void testExtendedCellBuilderWithDeepCopy() {
byte[] row = new byte[]{OLD_DATA};
byte[] family = new byte[]{OLD_DATA};
byte[] qualifier = new byte[]{OLD_DATA};
byte[] value = new byte[]{OLD_DATA};
byte[] tags = new byte[]{OLD_DATA};
long seqId = 999;
Cell cell = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
.setRow(row)
.setFamily(family)
.setQualifier(qualifier)
.setType(KeyValue.Type.Put.getCode())
.setValue(value)
.setTags(tags)
.setSequenceId(seqId)
.build();
row[0] = NEW_DATA;
family[0] = NEW_DATA;
qualifier[0] = NEW_DATA;
value[0] = NEW_DATA;
tags[0] = NEW_DATA;
assertEquals(OLD_DATA, cell.getRowArray()[cell.getRowOffset()]);
assertEquals(OLD_DATA, cell.getFamilyArray()[cell.getFamilyOffset()]);
assertEquals(OLD_DATA, cell.getQualifierArray()[cell.getQualifierOffset()]);
assertEquals(OLD_DATA, cell.getValueArray()[cell.getValueOffset()]);
assertEquals(OLD_DATA, cell.getTagsArray()[cell.getTagsOffset()]);
assertEquals(seqId, cell.getSequenceId());
}
}

View File

@ -20,21 +20,18 @@ package org.apache.hadoop.hbase;
import java.io.IOException; import java.io.IOException;
 import java.nio.ByteBuffer;

 import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
-import org.apache.hadoop.hbase.util.Bytes;
-import static org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

 import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;

 @Category({MiscTests.class, SmallTests.class})
 public class TestIndividualBytesFieldCell {

@@ -53,7 +50,7 @@ public class TestIndividualBytesFieldCell {
     // Other inputs
     long timestamp = 5000L;
     long seqId = 0L;
-    Type type = KeyValue.Type.Put;
+    KeyValue.Type type = KeyValue.Type.Put;

     ic0 = new IndividualBytesFieldCell(row, family, qualifier, timestamp, type, seqId, value, tags);
     kv0 = new KeyValue(row, family, qualifier, timestamp, type, value, tags);

@@ -142,7 +139,7 @@ public class TestIndividualBytesFieldCell {
     long timestamp = 5000L;
     long seqId = 0L;
-    Type type = KeyValue.Type.Put;
+    KeyValue.Type type = KeyValue.Type.Put;

     // Test when following fields are null.
     byte[] family = null;

@@ -182,4 +179,64 @@ public class TestIndividualBytesFieldCell {
   public void testIfSettableTimestampImplemented() {
     assertTrue(ic0 instanceof SettableTimestamp);
   }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testIllegalRow() {
+    new IndividualBytesFieldCell(Bytes.toBytes("row"), 0, 100,
+        HConstants.EMPTY_BYTE_ARRAY, 0, 0,
+        HConstants.EMPTY_BYTE_ARRAY, 0, 0,
+        0L, KeyValue.Type.Put, 0,
+        HConstants.EMPTY_BYTE_ARRAY, 0, 0,
+        HConstants.EMPTY_BYTE_ARRAY, 0, 0);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testIllegalFamily() {
+    new IndividualBytesFieldCell(Bytes.toBytes("row"), 0, 3,
+        Bytes.toBytes("family"), 0, 100,
+        HConstants.EMPTY_BYTE_ARRAY, 0, 0,
+        0L, KeyValue.Type.Put, 0,
+        HConstants.EMPTY_BYTE_ARRAY, 0, 0,
+        HConstants.EMPTY_BYTE_ARRAY, 0, 0);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testIllegalQualifier() {
+    new IndividualBytesFieldCell(Bytes.toBytes("row"), 0, 3,
+        Bytes.toBytes("family"), 0, 6,
+        Bytes.toBytes("qualifier"), 0, 100,
+        0L, KeyValue.Type.Put, 0,
+        HConstants.EMPTY_BYTE_ARRAY, 0, 0,
+        HConstants.EMPTY_BYTE_ARRAY, 0, 0);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testIllegalTimestamp() {
+    new IndividualBytesFieldCell(Bytes.toBytes("row"), 0, 3,
+        Bytes.toBytes("family"), 0, 6,
+        Bytes.toBytes("qualifier"), 0, 9,
+        -100, KeyValue.Type.Put, 0,
+        HConstants.EMPTY_BYTE_ARRAY, 0, 0,
+        HConstants.EMPTY_BYTE_ARRAY, 0, 0);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testIllegalValue() {
+    new IndividualBytesFieldCell(Bytes.toBytes("row"), 0, 3,
+        Bytes.toBytes("family"), 0, 6,
+        Bytes.toBytes("qualifier"), 0, 9,
+        0L, KeyValue.Type.Put, 0,
+        Bytes.toBytes("value"), 0, 100,
+        HConstants.EMPTY_BYTE_ARRAY, 0, 0);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testIllegalTags() {
+    new IndividualBytesFieldCell(Bytes.toBytes("row"), 0, 3,
+        Bytes.toBytes("family"), 0, 6,
+        Bytes.toBytes("qualifier"), 0, 9,
+        0L, KeyValue.Type.Put, 0,
+        Bytes.toBytes("value"), 0, 5,
+        Bytes.toBytes("tags"), 0, 100);
+  }
 }
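
The six tests added above each hand the IndividualBytesFieldCell constructor an (offset, length) pair that runs past its backing array, or a negative timestamp, and expect an IllegalArgumentException from the constructor's range checks. For contrast, a minimal sketch of a construction that passes every check, using the same full-argument constructor the tests exercise; the class name below is hypothetical and the assertion is illustrative:

    import org.apache.hadoop.hbase.HConstants;
    import org.apache.hadoop.hbase.IndividualBytesFieldCell;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ValidCellConstructionSketch {
      public static void main(String[] args) {
        byte[] row = Bytes.toBytes("row");             // length 3
        byte[] family = Bytes.toBytes("family");       // length 6
        byte[] qualifier = Bytes.toBytes("qualifier"); // length 9
        byte[] value = Bytes.toBytes("value");         // length 5
        // Every (offset, length) pair stays inside its backing array and the
        // timestamp is non-negative, so none of the sanity checks fire.
        IndividualBytesFieldCell cell = new IndividualBytesFieldCell(
            row, 0, row.length,
            family, 0, family.length,
            qualifier, 0, qualifier.length,
            5000L, KeyValue.Type.Put, 0L,
            value, 0, value.length,
            HConstants.EMPTY_BYTE_ARRAY, 0, 0);
        assert cell.getTimestamp() == 5000L;
      }
    }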

View File

@@ -21,6 +21,8 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;

 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.testclassification.MiscTests;

@@ -52,6 +54,6 @@ public class TestPBCell {
     pbr.setPosition(0);
     decoded = CODEC.decode(pbr);
     assertEquals(encodedLength, pbr.getPosition());
-    assertTrue(CellUtil.equals(cell, ProtobufUtil.toCell(decoded)));
+    assertTrue(CellUtil.equals(cell, ProtobufUtil.toCell(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), decoded)));
   }
 }
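
The interesting change is the new ProtobufUtil.toCell overload: the caller now supplies the CellBuilder to populate, instead of the utility picking a concrete Cell class itself. A hedged sketch of that call outside the codec test, building the CellProtos.Cell by hand; field values are illustrative:

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellBuilderFactory;
    import org.apache.hadoop.hbase.CellBuilderType;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos;

    public class ToCellSketch {
      public static void main(String[] args) {
        CellProtos.Cell pbCell = CellProtos.Cell.newBuilder()
            .setRow(ByteString.copyFromUtf8("row"))
            .setFamily(ByteString.copyFromUtf8("f"))
            .setQualifier(ByteString.copyFromUtf8("q"))
            .setTimestamp(5000L)
            .setCellType(CellProtos.CellType.PUT)
            .setValue(ByteString.copyFromUtf8("value"))
            .build();
        // SHALLOW_COPY is enough here: toCell() materializes each proto field
        // with toByteArray(), which already hands the builder private copies.
        Cell cell = ProtobufUtil.toCell(
            CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), pbCell);
        System.out.println(cell);
      }
    }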

View File

@@ -21,13 +21,16 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;

+import org.apache.hadoop.hbase.CellBuilderType;
+import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.ByteBuffInputStream;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ExtendedCellBuilder;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos;

 /**
@@ -64,16 +67,14 @@ public class MessageCodec implements Codec {
   }

   static class MessageDecoder extends BaseDecoder {
+    private final ExtendedCellBuilder cellBuilder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
     MessageDecoder(final InputStream in) {
       super(in);
     }

+    @Override
     protected Cell parseCell() throws IOException {
-      CellProtos.Cell pbcell = CellProtos.Cell.parseDelimitedFrom(this.in);
-      return CellUtil.createCell(pbcell.getRow().toByteArray(),
-          pbcell.getFamily().toByteArray(), pbcell.getQualifier().toByteArray(),
-          pbcell.getTimestamp(), (byte)pbcell.getCellType().getNumber(),
-          pbcell.getValue().toByteArray());
+      return ProtobufUtil.toCell(cellBuilder, CellProtos.Cell.parseDelimitedFrom(this.in));
     }
   }
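
Note the new field: the decoder allocates one ExtendedCellBuilder and reuses it across every parseCell() call, instead of allocating per cell as the old CellUtil.createCell path effectively did. A sketch of the same pattern in a stand-alone reader; class and method names are hypothetical, and it assumes toCell() resets the builder before populating it:

    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellBuilderType;
    import org.apache.hadoop.hbase.ExtendedCellBuilder;
    import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
    import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos;

    public class CellStreamReader {
      private final ExtendedCellBuilder cellBuilder =
          ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
      private final InputStream in;

      CellStreamReader(InputStream in) {
        this.in = in;
      }

      Cell next() throws IOException {
        CellProtos.Cell pb = CellProtos.Cell.parseDelimitedFrom(in);
        // One builder instance serves every call; each decoded field is a
        // fresh byte[] from the proto, so a shallow copy cannot alias the stream.
        return pb == null ? null : ProtobufUtil.toCell(cellBuilder, pb);
      }
    }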

View File

@@ -34,11 +34,11 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;

@@ -358,15 +358,17 @@ public class HMobStore extends HStore {
       }
     }
     if (result == null) {
-      LOG.warn("The KeyValue result is null, assemble a new KeyValue with the same row,family,"
+      LOG.warn("The Cell result is null, assemble a new Cell with the same row,family,"
           + "qualifier,timestamp,type and tags but with an empty value to return.");
-      result = new KeyValue(reference.getRowArray(), reference.getRowOffset(),
-          reference.getRowLength(), reference.getFamilyArray(), reference.getFamilyOffset(),
-          reference.getFamilyLength(), reference.getQualifierArray(),
-          reference.getQualifierOffset(), reference.getQualifierLength(), reference.getTimestamp(),
-          Type.codeToType(reference.getTypeByte()), HConstants.EMPTY_BYTE_ARRAY,
-          0, 0, reference.getTagsArray(), reference.getTagsOffset(),
-          reference.getTagsLength());
+      result = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
+          .setRow(reference.getRowArray(), reference.getRowOffset(), reference.getRowLength())
+          .setFamily(reference.getFamilyArray(), reference.getFamilyOffset(), reference.getFamilyLength())
+          .setQualifier(reference.getQualifierArray(), reference.getQualifierOffset(), reference.getQualifierLength())
+          .setTimestamp(reference.getTimestamp())
+          .setType(reference.getTypeByte())
+          .setValue(HConstants.EMPTY_BYTE_ARRAY)
+          .setTags(reference.getTagsArray(), reference.getTagsOffset(), reference.getTagsLength())
+          .build();
     }
     return result;
   }
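
Here the builder type matters: the assembled cell escapes this method and can outlive the scanner that produced `reference`, whose backing arrays may belong to a shared block, so DEEP_COPY makes build() copy every field into fresh byte arrays. A short sketch of the trade-off; the comments reflect the apparent contract of the two types rather than a documented guarantee:

    import org.apache.hadoop.hbase.CellBuilderType;
    import org.apache.hadoop.hbase.ExtendedCellBuilder;
    import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;

    public class CopySemanticsSketch {
      public static void main(String[] args) {
        // DEEP_COPY: build() copies every (array, offset, length) slice into
        // its own byte[], so the cell stays valid after source buffers recycle.
        ExtendedCellBuilder escaping =
            ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY);

        // SHALLOW_COPY: build() keeps references to the supplied arrays;
        // cheaper, but only safe while the caller guarantees those arrays
        // do not change underneath the cell.
        ExtendedCellBuilder shortLived =
            ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
      }
    }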

View File

@@ -123,11 +123,13 @@ import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.conf.ConfigurationManager;
 import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
 import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
 import org.apache.hadoop.hbase.exceptions.RegionInRecoveryException;
 import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
 import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
+import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.FilterWrapper;

@@ -7546,13 +7548,16 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
     if (currentCell != null) {
       tags = TagUtil.carryForwardTags(tags, currentCell);
       byte[] newValue = supplier.apply(currentCell);
-      // TODO: FIX. This is carnel knowledge of how KeyValues are made...
-      // This will be fixed by HBASE-18519
-      return new KeyValue(mutation.getRow(), 0, mutation.getRow().length,
-          columnFamily, 0, columnFamily.length,
-          delta.getQualifierArray(), delta.getQualifierOffset(), delta.getQualifierLength(),
-          Math.max(currentCell.getTimestamp() + 1, now),
-          KeyValue.Type.Put, newValue, 0, newValue.length, tags);
+      return ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+          .setRow(mutation.getRow(), 0, mutation.getRow().length)
+          .setFamily(columnFamily, 0, columnFamily.length)
+          // copy the qualifier if the cell is located in shared memory.
+          .setQualifier(CellUtil.cloneQualifier(delta))
+          .setTimestamp(Math.max(currentCell.getTimestamp() + 1, now))
+          .setType(KeyValue.Type.Put.getCode())
+          .setValue(newValue, 0, newValue.length)
+          .setTags(TagUtil.fromList(tags))
+          .build();
     } else {
       CellUtil.updateLatestStamp(delta, now);
       return CollectionUtils.isEmpty(tags) ? delta : CellUtil.createCell(delta, tags);
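
This change also retires the old TODO: the reconciled Increment/Append cell is assembled through the builder rather than the many-argument KeyValue constructor, with CellUtil.cloneQualifier guarding against a delta that lives in shared memory. The timestamp line is worth calling out, since Math.max(currentCell.getTimestamp() + 1, now) keeps the new version strictly newer than the one it supersedes even when the server clock lags. A reduced sketch under assumed parameters; the method and argument names are illustrative and tag handling is omitted:

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellBuilderType;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
    import org.apache.hadoop.hbase.KeyValue;

    public class DeltaCellSketch {
      // Assembles the cell that replaces currentCell after an Increment/Append;
      // how newValue is computed is outside the scope of this sketch.
      static Cell reconcile(byte[] row, byte[] family, Cell delta,
          Cell currentCell, byte[] newValue, long now) {
        return ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
            .setRow(row, 0, row.length)
            .setFamily(family, 0, family.length)
            // cloneQualifier copies the bytes out in case delta is off-heap
            // or backed by a shared buffer.
            .setQualifier(CellUtil.cloneQualifier(delta))
            // +1 keeps the new version strictly newer than the old one.
            .setTimestamp(Math.max(currentCell.getTimestamp() + 1, now))
            .setType(KeyValue.Type.Put.getCode())
            .setValue(newValue, 0, newValue.length)
            .build();
      }
    }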

View File

@@ -25,6 +25,9 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;

@@ -35,6 +38,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate;
 public class BulkLoadCellFilter {
   private static final Log LOG = LogFactory.getLog(BulkLoadCellFilter.class);
+  private final CellBuilder cellBuilder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);

   /**
    * Filters the bulk load cell using the supplied predicate.
    * @param cell The WAL cell to filter.

@@ -75,7 +79,13 @@ public class BulkLoadCellFilter {
         .setBulkloadSeqNum(bld.getBulkloadSeqNum());
     newDesc.addAllStores(copiedStoresList);
     BulkLoadDescriptor newBulkLoadDescriptor = newDesc.build();
-    return CellUtil.createCell(CellUtil.cloneRow(cell), WALEdit.METAFAMILY, WALEdit.BULK_LOAD,
-        cell.getTimestamp(), cell.getTypeByte(), newBulkLoadDescriptor.toByteArray());
+    return cellBuilder.clear()
+        .setRow(CellUtil.cloneRow(cell))
+        .setFamily(WALEdit.METAFAMILY)
+        .setQualifier(WALEdit.BULK_LOAD)
+        .setTimestamp(cell.getTimestamp())
+        .setType(cell.getTypeByte())
+        .setValue(newBulkLoadDescriptor.toByteArray())
+        .build();
   }
 }
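
Since the filter holds its builder as instance state, the leading clear() is load-bearing: it wipes whatever fields the previous WAL cell left behind before the next one is assembled. A sketch of the reuse pattern; the class, method, and the rewritten family/qualifier below are hypothetical:

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellBuilder;
    import org.apache.hadoop.hbase.CellBuilderFactory;
    import org.apache.hadoop.hbase.CellBuilderType;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ReusedBuilderSketch {
      private final CellBuilder cellBuilder =
          CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);

      Cell rewrite(Cell original, byte[] newValue) {
        // clear() first: without it, any field set while building the
        // previous cell would silently carry over into this one.
        return cellBuilder.clear()
            .setRow(CellUtil.cloneRow(original))
            .setFamily(Bytes.toBytes("meta"))          // illustrative family
            .setQualifier(Bytes.toBytes("rewritten"))  // illustrative qualifier
            .setTimestamp(original.getTimestamp())
            .setType(original.getTypeByte())
            .setValue(newValue)
            .build();
      }
    }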

View File

@@ -347,13 +347,13 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
               permRequest, tableName, makeFamilyMap(family.getKey(), qualifier));
           }
         }
-      } else if (family.getValue() instanceof List) { // List<KeyValue>
-        List<KeyValue> kvList = (List<KeyValue>)family.getValue();
-        for (KeyValue kv : kvList) {
+      } else if (family.getValue() instanceof List) { // List<Cell>
+        List<Cell> cellList = (List<Cell>)family.getValue();
+        for (Cell cell : cellList) {
           if (!authManager.authorize(user, tableName, family.getKey(),
-              CellUtil.cloneQualifier(kv), permRequest)) {
+              CellUtil.cloneQualifier(cell), permRequest)) {
             return AuthResult.deny(request, "Failed qualifier check", user, permRequest,
-              tableName, makeFamilyMap(family.getKey(), CellUtil.cloneQualifier(kv)));
+              tableName, makeFamilyMap(family.getKey(), CellUtil.cloneQualifier(cell)));
           }
         }
       }

View File

@@ -26,6 +26,8 @@ import java.io.IOException;
 import java.nio.ByteBuffer;

 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.ProcedureInfo;

@@ -336,7 +338,7 @@ public class TestProtobufUtil {
     dbb.put(arr);
     ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, kv1.getLength(), kv2.getLength());
     CellProtos.Cell cell = ProtobufUtil.toCell(offheapKV);
-    Cell newOffheapKV = ProtobufUtil.toCell(cell);
+    Cell newOffheapKV = ProtobufUtil.toCell(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell);
     assertTrue(CellComparator.COMPARATOR.compare(offheapKV, newOffheapKV) == 0);
   }

View File

@@ -22,6 +22,8 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;

 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.ProcedureInfo;

@@ -257,7 +259,7 @@ public class TestProtobufUtil {
     dbb.put(arr);
     ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, kv1.getLength(), kv2.getLength());
     CellProtos.Cell cell = ProtobufUtil.toCell(offheapKV);
-    Cell newOffheapKV = ProtobufUtil.toCell(cell);
+    Cell newOffheapKV = ProtobufUtil.toCell(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), cell);
     assertTrue(CellComparator.COMPARATOR.compare(offheapKV, newOffheapKV) == 0);
   }
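
Both copies of this test assert the same invariant: an off-heap ByteBufferKeyValue, serialized to protobuf and rebuilt through the caller-supplied builder, still compares equal to the original. A condensed sketch of that round trip; the cell contents are illustrative:

    import java.nio.ByteBuffer;
    import org.apache.hadoop.hbase.ByteBufferKeyValue;
    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellBuilderFactory;
    import org.apache.hadoop.hbase.CellBuilderType;
    import org.apache.hadoop.hbase.CellComparator;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos;
    import org.apache.hadoop.hbase.util.Bytes;

    public class OffheapRoundTripSketch {
      public static void main(String[] args) {
        KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
            Bytes.toBytes("q"), 5000L, Bytes.toBytes("value"));
        // Copy the serialized KeyValue into a direct buffer to get an
        // off-heap cell.
        ByteBuffer dbb = ByteBuffer.allocateDirect(kv.getLength());
        dbb.put(kv.getBuffer(), 0, kv.getLength());
        ByteBufferKeyValue offheapKV = new ByteBufferKeyValue(dbb, 0, kv.getLength());
        // Cell -> proto -> Cell; the rebuild routes through the supplied builder.
        CellProtos.Cell pb = ProtobufUtil.toCell(offheapKV);
        Cell rebuilt = ProtobufUtil.toCell(
            CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY), pb);
        assert CellComparator.COMPARATOR.compare(offheapKV, rebuilt) == 0;
      }
    }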