HBASE-9994 ZeroCopyLiteralByteString.wrap() should be used in place of ByteString.copyFrom()

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1543309 13f79535-47bb-0310-9956-ffa450edef68
Author: Zhihong Yu
Date:   2013-11-19 05:01:58 +00:00
parent  51d55f6dd9
commit  dc20322a6a
2 changed files with 9 additions and 8 deletions
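For context, a minimal sketch of what the swap buys (ZeroCopyLiteralByteString is HBase's helper that is imported from the com.google.protobuf package; the WrapVsCopy class and variable names below are illustrative only): copyFrom() allocates a new array and copies the bytes, while wrap() hands the caller's existing array straight to protobuf.

```java
import com.google.protobuf.ByteString;
import com.google.protobuf.ZeroCopyLiteralByteString; // HBase helper living in the protobuf package

public class WrapVsCopy {
  public static void main(String[] args) {
    byte[] row = "row-1".getBytes();

    // copyFrom allocates a fresh backing array and copies the bytes into it.
    ByteString copied = ByteString.copyFrom(row, 0, row.length);

    // wrap aliases the caller's array: no allocation, no copy. The caller must
    // not mutate 'row' afterwards, since protobuf assumes ByteStrings are immutable.
    ByteString wrapped = ZeroCopyLiteralByteString.wrap(row, 0, row.length);

    System.out.println(copied.equals(wrapped)); // true: same logical contents
  }
}
```

The trade-off is aliasing: a wrapped byte[] must not be modified afterwards, which is the assumption the call sites below rely on.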

MessageCodec.java

@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
 import com.google.protobuf.ByteString;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 /**
  * Codec that just writes out Cell as a protobuf Cell Message. Does not write the mvcc stamp.
@@ -45,15 +46,15 @@ public class MessageCodec implements Codec {
       CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder();
       // This copies bytes from Cell to ByteString. I don't see anyway around the copy.
       // ByteString is final.
-      builder.setRow(ByteString.copyFrom(cell.getRowArray(), cell.getRowOffset(),
+      builder.setRow(ZeroCopyLiteralByteString.wrap(cell.getRowArray(), cell.getRowOffset(),
         cell.getRowLength()));
-      builder.setFamily(ByteString.copyFrom(cell.getFamilyArray(), cell.getFamilyOffset(),
+      builder.setFamily(ZeroCopyLiteralByteString.wrap(cell.getFamilyArray(), cell.getFamilyOffset(),
         cell.getFamilyLength()));
-      builder.setQualifier(ByteString.copyFrom(cell.getQualifierArray(), cell.getQualifierOffset(),
-        cell.getQualifierLength()));
+      builder.setQualifier(ZeroCopyLiteralByteString.wrap(cell.getQualifierArray(),
+        cell.getQualifierOffset(), cell.getQualifierLength()));
       builder.setTimestamp(cell.getTimestamp());
       builder.setCellType(CellProtos.CellType.valueOf(cell.getTypeByte()));
-      builder.setValue(ByteString.copyFrom(cell.getValueArray(), cell.getValueOffset(),
+      builder.setValue(ZeroCopyLiteralByteString.wrap(cell.getValueArray(), cell.getValueOffset(),
         cell.getValueLength()));
       CellProtos.Cell pbcell = builder.build();
       pbcell.writeDelimitedTo(this.out);
@@ -83,4 +84,4 @@ public class MessageCodec implements Codec {
   public Encoder getEncoder(OutputStream os) {
     return new MessageEncoder(os);
   }
-}
+}
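To show where the change takes effect, here is a hedged sketch of driving the encoder above (the MessageCodecExample class name is made up; it assumes the standard Codec.Encoder contract of write() followed by flush()):

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.codec.MessageCodec;
import org.apache.hadoop.hbase.util.Bytes;

public class MessageCodecExample {
  public static void main(String[] args) throws IOException {
    // A KeyValue is the standard Cell implementation; its backing arrays are
    // what wrap() now aliases instead of copying.
    Cell cell = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("fam"),
        Bytes.toBytes("qual"), Bytes.toBytes("value"));

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Codec.Encoder encoder = new MessageCodec().getEncoder(out);
    encoder.write(cell);   // builds the protobuf Cell and writes it delimited
    encoder.flush();

    System.out.println("encoded " + out.size() + " bytes");
  }
}
```

With the patch applied, the row, family, qualifier, and value arrays of the Cell are wrapped rather than copied when the protobuf Cell message is built.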

TableSnapshotInputFormat.java

@@ -69,7 +69,7 @@ import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.ByteString;
+import com.google.protobuf.ZeroCopyLiteralByteString;
 /**
  * TableSnapshotInputFormat allows a MapReduce job to run over a table snapshot. The job
@@ -157,7 +157,7 @@ public class TableSnapshotInputFormat extends InputFormat<ImmutableBytesWritable
         MapReduceProtos.TableSnapshotRegionSplit.newBuilder()
           .setRegion(RegionSpecifier.newBuilder()
             .setType(RegionSpecifierType.ENCODED_REGION_NAME)
-            .setValue(ByteString.copyFrom(Bytes.toBytes(regionName))).build());
+            .setValue(ZeroCopyLiteralByteString.wrap(Bytes.toBytes(regionName))).build());
         for (String location : locations) {
           builder.addLocations(location);