HBASE-13825 Use ProtobufUtil#mergeFrom and ProtobufUtil#mergeDelimitedFrom in place of builder methods of same name

Incorporates HBASE-14076
Andrew Purtell 2015-08-06 21:48:04 -07:00
parent 51061f08a3
commit b194052ec0
44 changed files with 390 additions and 142 deletions
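The same mechanical swap repeats across all 44 files: direct builder merges, which are subject to CodedInputStream's hard-coded 64MB decode limit and throw InvalidProtocolBufferException, are routed through new ProtobufUtil helpers that raise the limit to the actual message size and throw plain IOException. A minimal before/after sketch of the pattern, using the ClusterId hunk below:

// Before: builder.mergeFrom is capped at protobuf's 64MB decode limit
cid = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();

// After: the helper sets the CodedInputStream size limit to the message size;
// callers catch the broader IOException instead of InvalidProtocolBufferException
ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
cid = builder.build();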

View File

@ -18,13 +18,13 @@
package org.apache.hadoop.hbase;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.UUID;
/**
@ -66,8 +66,9 @@ public class ClusterId {
ClusterIdProtos.ClusterId.Builder builder = ClusterIdProtos.ClusterId.newBuilder();
ClusterIdProtos.ClusterId cid = null;
try {
cid = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
} catch (InvalidProtocolBufferException e) {
ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
cid = builder.build();
} catch (IOException e) {
throw new DeserializationException(e);
}
return convert(cid);

View File

@ -46,7 +46,6 @@ import org.apache.hadoop.io.WritableComparable;
import com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.util.ByteStringer;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* An HColumnDescriptor contains information about a column family such as the
@ -1420,8 +1419,9 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder();
ColumnFamilySchema cfs = null;
try {
cfs = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
} catch (InvalidProtocolBufferException e) {
ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
cfs = builder.build();
} catch (IOException e) {
throw new DeserializationException(e);
}
return convert(cfs);

View File

@ -48,8 +48,6 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.PairOfSameType;
import org.apache.hadoop.io.DataInputBuffer;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* Information about a region. A region is a range of keys in the whole keyspace of a table, an
* identifier (a timestamp) for differentiating between subset ranges (after region split)
@ -1118,11 +1116,11 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
if (ProtobufUtil.isPBMagicPrefix(bytes, offset, len)) {
int pblen = ProtobufUtil.lengthOfPBMagic();
try {
HBaseProtos.RegionInfo ri =
HBaseProtos.RegionInfo.newBuilder().
mergeFrom(bytes, pblen + offset, len - pblen).build();
HBaseProtos.RegionInfo.Builder builder = HBaseProtos.RegionInfo.newBuilder();
ProtobufUtil.mergeFrom(builder, bytes, pblen + offset, len - pblen);
HBaseProtos.RegionInfo ri = builder.build();
return convert(ri);
} catch (InvalidProtocolBufferException e) {
} catch (IOException e) {
throw new DeserializationException(e);
}
} else {

View File

@ -56,8 +56,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.WritableComparable;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* HTableDescriptor contains the details about an HBase table such as the descriptors of
* all the column families, is the table a catalog table, <code> -ROOT- </code> or
@ -1585,8 +1583,9 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
TableSchema.Builder builder = TableSchema.newBuilder();
TableSchema ts;
try {
ts = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
} catch (InvalidProtocolBufferException e) {
ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
ts = builder.build();
} catch (IOException e) {
throw new DeserializationException(e);
}
return convert(ts);

View File

@ -17,8 +17,9 @@
*/
package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.util.ByteStringer;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@ -121,10 +122,11 @@ public class RegionTransition {
ProtobufUtil.expectPBMagicPrefix(data);
try {
int prefixLen = ProtobufUtil.lengthOfPBMagic();
ZooKeeperProtos.RegionTransition rt = ZooKeeperProtos.RegionTransition.newBuilder().
mergeFrom(data, prefixLen, data.length - prefixLen).build();
return new RegionTransition(rt);
} catch (InvalidProtocolBufferException e) {
ZooKeeperProtos.RegionTransition.Builder builder =
ZooKeeperProtos.RegionTransition.newBuilder();
ProtobufUtil.mergeFrom(builder, data, prefixLen, data.length - prefixLen);
return new RegionTransition(builder.build());
} catch (IOException e) {
throw new DeserializationException(e);
}
}

View File

@ -75,7 +75,6 @@ import org.apache.hadoop.hbase.util.Threads;
import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.Descriptors;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
@ -1884,10 +1883,10 @@ public class HTable implements HTableInterface, RegionLocator {
", value=" + serviceResult.getValue().getValue());
}
try {
callback.update(region, row,
(R) responsePrototype.newBuilderForType().mergeFrom(
serviceResult.getValue().getValue()).build());
} catch (InvalidProtocolBufferException e) {
Message.Builder builder = responsePrototype.newBuilderForType();
ProtobufUtil.mergeFrom(builder, serviceResult.getValue().getValue());
callback.update(region, row, (R) builder.build());
} catch (IOException e) {
LOG.error("Unexpected response type from endpoint " + methodDescriptor.getFullName(),
e);
callbackErrorExceptions.add(e);

View File

@ -28,6 +28,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
import org.apache.hadoop.ipc.RemoteException;
@ -90,7 +91,7 @@ public class AsyncServerResponseHandler extends ChannelInboundHandlerAdapter {
// Call may be null because it may have timedout and been cleaned up on this side already
if (call.responseDefaultType != null) {
Message.Builder builder = call.responseDefaultType.newBuilderForType();
builder.mergeDelimitedFrom(in);
ProtobufUtil.mergeDelimitedFrom(builder, in);
value = builder.build();
}
CellScanner cellBlockScanner = null;

View File

@ -68,8 +68,9 @@ public class MasterCoprocessorRpcChannel extends CoprocessorRpcChannel{
CoprocessorServiceResponse result = ProtobufUtil.execService(connection.getMaster(), call);
Message response = null;
if (result.getValue().hasValue()) {
response = responsePrototype.newBuilderForType()
.mergeFrom(result.getValue().getValue()).build();
Message.Builder builder = responsePrototype.newBuilderForType();
ProtobufUtil.mergeFrom(builder, result.getValue().getValue());
response = builder.build();
} else {
response = responsePrototype.getDefaultInstanceForType();
}

View File

@ -96,8 +96,9 @@ public class RegionCoprocessorRpcChannel extends CoprocessorRpcChannel{
.callWithRetries(callable, operationTimeout);
Message response = null;
if (result.getValue().hasValue()) {
response = responsePrototype.newBuilderForType()
.mergeFrom(result.getValue().getValue()).build();
Message.Builder builder = responsePrototype.newBuilderForType();
ProtobufUtil.mergeFrom(builder, result.getValue().getValue());
response = builder.build();
} else {
response = responsePrototype.getDefaultInstanceForType();
}

View File

@ -62,8 +62,9 @@ public class RegionServerCoprocessorRpcChannel extends CoprocessorRpcChannel {
ProtobufUtil.execRegionServerService(connection.getClient(serverName), call);
Message response = null;
if (result.getValue().hasValue()) {
response =
responsePrototype.newBuilderForType().mergeFrom(result.getValue().getValue()).build();
Message.Builder builder = responsePrototype.newBuilderForType();
ProtobufUtil.mergeFrom(builder, result.getValue().getValue());
response = builder.build();
} else {
response = responsePrototype.getDefaultInstanceForType();
}

View File

@ -959,7 +959,7 @@ public class RpcClientImpl extends AbstractRpcClient {
Message value = null;
if (call.responseDefaultType != null) {
Builder builder = call.responseDefaultType.newBuilderForType();
builder.mergeDelimitedFrom(in);
ProtobufUtil.mergeDelimitedFrom(builder, in);
value = builder.build();
}
CellScanner cellBlockScanner = null;

View File

@ -22,6 +22,7 @@ import static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpeci
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
@ -67,6 +68,7 @@ import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.LimitInputStream;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
@ -155,6 +157,7 @@ import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
@ -2765,8 +2768,9 @@ public final class ProtobufUtil {
ClientProtos.CellVisibility.Builder builder = ClientProtos.CellVisibility.newBuilder();
ClientProtos.CellVisibility proto = null;
try {
proto = builder.mergeFrom(protoBytes).build();
} catch (InvalidProtocolBufferException e) {
ProtobufUtil.mergeFrom(builder, protoBytes);
proto = builder.build();
} catch (IOException e) {
throw new DeserializationException(e);
}
return toCellVisibility(proto);
@ -2807,8 +2811,9 @@ public final class ProtobufUtil {
ClientProtos.Authorizations.Builder builder = ClientProtos.Authorizations.newBuilder();
ClientProtos.Authorizations proto = null;
try {
proto = builder.mergeFrom(protoBytes).build();
} catch (InvalidProtocolBufferException e) {
ProtobufUtil.mergeFrom(builder, protoBytes);
proto = builder.build();
} catch (IOException e) {
throw new DeserializationException(e);
}
return toAuthorizations(proto);
@ -3070,6 +3075,104 @@ public final class ProtobufUtil {
return desc.build();
}
/**
* This version of protobuf's mergeDelimitedFrom avoids the hard-coded 64MB limit for decoding
* buffers
* @param builder current message builder
* @param in InputStream with delimited protobuf data
* @throws IOException
*/
public static void mergeDelimitedFrom(Message.Builder builder, InputStream in)
throws IOException {
// This used to be builder.mergeDelimitedFrom(in);
// but is replaced to allow us to bump the protobuf size limit.
final int firstByte = in.read();
if (firstByte != -1) {
final int size = CodedInputStream.readRawVarint32(firstByte, in);
final InputStream limitedInput = new LimitInputStream(in, size);
final CodedInputStream codedInput = CodedInputStream.newInstance(limitedInput);
codedInput.setSizeLimit(size);
builder.mergeFrom(codedInput);
codedInput.checkLastTagWas(0);
}
}
/**
* This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
* buffers where the message size is known
* @param builder current message builder
* @param in InputStream containing protobuf data
* @param size known size of protobuf data
* @throws IOException
*/
public static void mergeFrom(Message.Builder builder, InputStream in, int size)
throws IOException {
final CodedInputStream codedInput = CodedInputStream.newInstance(in);
codedInput.setSizeLimit(size);
builder.mergeFrom(codedInput);
codedInput.checkLastTagWas(0);
}
/**
* This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
* buffers where the message size is not known
* @param builder current message builder
* @param in InputStream containing protobuf data
* @throws IOException
*/
public static void mergeFrom(Message.Builder builder, InputStream in)
throws IOException {
final CodedInputStream codedInput = CodedInputStream.newInstance(in);
codedInput.setSizeLimit(Integer.MAX_VALUE);
builder.mergeFrom(codedInput);
codedInput.checkLastTagWas(0);
}
/**
* This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
* buffers when working with ByteStrings
* @param builder current message builder
* @param bs ByteString containing the protobuf data
* @throws IOException
*/
public static void mergeFrom(Message.Builder builder, ByteString bs) throws IOException {
final CodedInputStream codedInput = bs.newCodedInput();
codedInput.setSizeLimit(bs.size());
builder.mergeFrom(codedInput);
codedInput.checkLastTagWas(0);
}
/**
* This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
* buffers when working with byte arrays
* @param builder current message builder
* @param b byte array
* @throws IOException
*/
public static void mergeFrom(Message.Builder builder, byte[] b) throws IOException {
final CodedInputStream codedInput = CodedInputStream.newInstance(b);
codedInput.setSizeLimit(b.length);
builder.mergeFrom(codedInput);
codedInput.checkLastTagWas(0);
}
/**
* This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
* buffers when working with byte array slices
* @param builder current message builder
* @param b byte array
* @param offset offset into the byte array
* @param length length of the protobuf data
* @throws IOException
*/
public static void mergeFrom(Message.Builder builder, byte[] b, int offset, int length)
throws IOException {
final CodedInputStream codedInput = CodedInputStream.newInstance(b, offset, length);
codedInput.setSizeLimit(length);
builder.mergeFrom(codedInput);
codedInput.checkLastTagWas(0);
}
public static ReplicationLoadSink toReplicationLoadSink(
ClusterStatusProtos.ReplicationLoadSink cls) {
return new ReplicationLoadSink(cls.getAgeOfLastAppliedOp(), cls.getTimeStampsOfLastAppliedOp());
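Taken together, these overloads cover every call-site shape touched by this patch. A hedged quick-reference sketch (builder, bytes, and stream names are placeholders, not code from the patch):

Message.Builder builder = prototype.newBuilderForType();
ProtobufUtil.mergeFrom(builder, bytes);                  // whole byte[]
ProtobufUtil.mergeFrom(builder, bytes, offset, length);  // byte[] slice, e.g. after a PB magic prefix
ProtobufUtil.mergeFrom(builder, byteString);             // ByteString from a coprocessor result
ProtobufUtil.mergeFrom(builder, in, size);               // InputStream, message size known
ProtobufUtil.mergeFrom(builder, in);                     // InputStream, size unknown (limit = Integer.MAX_VALUE)
ProtobufUtil.mergeDelimitedFrom(builder, in);            // varint-delimited stream, e.g. RPC responses
Message parsed = builder.build();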

View File

@ -41,8 +41,6 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.NodeExistsException;
import com.google.protobuf.InvalidProtocolBufferException;
@InterfaceAudience.Private
public class ReplicationPeerZKImpl implements ReplicationPeer, Abortable, Closeable {
private static final Log LOG = LogFactory.getLog(ReplicationPeerZKImpl.class);
@ -199,9 +197,10 @@ public class ReplicationPeerZKImpl implements ReplicationPeer, Abortable, Closea
ZooKeeperProtos.ReplicationState.newBuilder();
ZooKeeperProtos.ReplicationState state;
try {
state = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
state = builder.build();
return state.getState();
} catch (InvalidProtocolBufferException e) {
} catch (IOException e) {
throw new DeserializationException(e);
}
}

View File

@ -48,7 +48,6 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;
import org.apache.zookeeper.KeeperException;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* This class provides an implementation of the ReplicationPeers interface using Zookeeper. The
@ -492,8 +491,9 @@ public class ReplicationPeersZKImpl extends ReplicationStateZKBase implements Re
ZooKeeperProtos.ReplicationPeer.newBuilder();
ZooKeeperProtos.ReplicationPeer peer;
try {
peer = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
} catch (InvalidProtocolBufferException e) {
ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
peer = builder.build();
} catch (IOException e) {
throw new DeserializationException(e);
}
return convert(peer);

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security.token;
import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
@ -136,8 +137,10 @@ public class AuthenticationTokenIdentifier extends TokenIdentifier {
int len = in.readInt();
byte[] inBytes = new byte[len];
in.readFully(inBytes);
AuthenticationProtos.TokenIdentifier identifier =
AuthenticationProtos.TokenIdentifier.newBuilder().mergeFrom(inBytes).build();
AuthenticationProtos.TokenIdentifier.Builder builder =
AuthenticationProtos.TokenIdentifier.newBuilder();
ProtobufUtil.mergeFrom(builder, inBytes);
AuthenticationProtos.TokenIdentifier identifier = builder.build();
// sanity check on type
if (!identifier.hasKind() ||
identifier.getKind() != AuthenticationProtos.TokenIdentifier.Kind.HBASE_AUTH_TOKEN) {

View File

@ -19,8 +19,6 @@
*/
package org.apache.hadoop.hbase.zookeeper;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@ -28,6 +26,7 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.zookeeper.KeeperException;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@ -189,9 +188,9 @@ public class ZKTableStateClientSideReader {
ProtobufUtil.expectPBMagicPrefix(data);
ZooKeeperProtos.Table.Builder builder = ZooKeeperProtos.Table.newBuilder();
int magicLen = ProtobufUtil.lengthOfPBMagic();
ZooKeeperProtos.Table t = builder.mergeFrom(data, magicLen, data.length - magicLen).build();
return t.getState();
} catch (InvalidProtocolBufferException e) {
ProtobufUtil.mergeFrom(builder, data, magicLen, data.length - magicLen);
return builder.getState();
} catch (IOException e) {
KeeperException ke = new KeeperException.DataInconsistencyException();
ke.initCause(e);
throw ke;

View File

@ -1956,12 +1956,14 @@ public class ZKUtil {
}
// parse the data of the above peer znode.
try {
String clusterKey = ZooKeeperProtos.ReplicationPeer.newBuilder().
mergeFrom(data, pblen, data.length - pblen).getClusterkey();
sb.append("\n").append(znodeToProcess).append(": ").append(clusterKey);
// add the peer-state.
appendPeerState(zkw, znodeToProcess, sb);
} catch (InvalidProtocolBufferException ipbe) {
ZooKeeperProtos.ReplicationPeer.Builder builder =
ZooKeeperProtos.ReplicationPeer.newBuilder();
ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
String clusterKey = builder.getClusterkey();
sb.append("\n").append(znodeToProcess).append(": ").append(clusterKey);
// add the peer-state.
appendPeerState(zkw, znodeToProcess, sb);
} catch (IOException ipbe) {
LOG.warn("Got Exception while parsing peer: " + znodeToProcess, ipbe);
}
}
@ -1979,8 +1981,12 @@ public class ZKUtil {
byte[] peerStateData;
try {
peerStateData = ZKUtil.getData(zkw, peerStateZnode);
sb.append(ZooKeeperProtos.ReplicationState.newBuilder()
.mergeFrom(peerStateData, pblen, peerStateData.length - pblen).getState().name());
ZooKeeperProtos.ReplicationState.Builder builder =
ZooKeeperProtos.ReplicationState.newBuilder();
ProtobufUtil.mergeFrom(builder, peerStateData, pblen, peerStateData.length - pblen);
sb.append(builder.getState().name());
} catch (IOException ipbe) {
LOG.warn("Got Exception while parsing peer: " + znodeToProcess, ipbe);
} catch (InterruptedException e) {
zkw.interruptedException(e);
return;
@ -2201,8 +2207,9 @@ public class ZKUtil {
ZooKeeperProtos.ReplicationHLogPosition.newBuilder();
ZooKeeperProtos.ReplicationHLogPosition position;
try {
position = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
} catch (InvalidProtocolBufferException e) {
ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
position = builder.build();
} catch (IOException e) {
throw new DeserializationException(e);
}
return position.getPosition();
@ -2257,8 +2264,9 @@ public class ZKUtil {
int pblen = ProtobufUtil.lengthOfPBMagic();
RegionStoreSequenceIds storeIds = null;
try {
storeIds = regionSequenceIdsBuilder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
} catch (InvalidProtocolBufferException e) {
ProtobufUtil.mergeFrom(regionSequenceIdsBuilder, bytes, pblen, bytes.length - pblen);
storeIds = regionSequenceIdsBuilder.build();
} catch (IOException e) {
throw new DeserializationException(e);
}
return storeIds;

View File

@ -37,6 +37,7 @@ public class PBCell extends PBType<CellProtos.Cell> {
public int skip(PositionedByteRange src) {
CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder();
CodedInputStream is = inputStreamFromByteRange(src);
is.setSizeLimit(src.getLength());
try {
builder.mergeFrom(is);
int consumed = is.getTotalBytesRead();
@ -51,6 +52,7 @@ public class PBCell extends PBType<CellProtos.Cell> {
public CellProtos.Cell decode(PositionedByteRange src) {
CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder();
CodedInputStream is = inputStreamFromByteRange(src);
is.setSizeLimit(src.getLength());
try {
CellProtos.Cell ret = builder.mergeFrom(is).build();
src.setPosition(src.getPosition() + is.getTotalBytesRead());
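PBCell is the one spot where the patch raises the limit inline instead of calling a ProtobufUtil helper, presumably because skip() and decode() need getTotalBytesRead() from the same CodedInputStream afterward to advance the PositionedByteRange; the stream is built from the byte range and its size limit is set to src.getLength() directly.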

View File

@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
import org.codehaus.jackson.annotate.JsonProperty;
@ -198,7 +199,7 @@ public class CellModel implements ProtobufMessageHandler, Serializable {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
Cell.Builder builder = Cell.newBuilder();
builder.mergeFrom(message);
ProtobufUtil.mergeFrom(builder, message);
setColumn(builder.getColumn().toByteArray());
setValue(builder.getData().toByteArray());
if (builder.hasTimestamp()) {

View File

@ -32,6 +32,7 @@ import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet;
@ -133,7 +134,7 @@ public class CellSetModel implements Serializable, ProtobufMessageHandler {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
CellSet.Builder builder = CellSet.newBuilder();
builder.mergeFrom(message);
ProtobufUtil.mergeFrom(builder, message);
for (CellSet.Row row: builder.getRowsList()) {
RowModel rowModel = new RowModel(row.getKey().toByteArray());
for (Cell cell: row.getValuesList()) {

View File

@ -69,6 +69,7 @@ import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.TimestampsFilter;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.filter.WhileMatchFilter;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
@ -827,7 +828,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
Scanner.Builder builder = Scanner.newBuilder();
builder.mergeFrom(message);
ProtobufUtil.mergeFrom(builder, message);
if (builder.hasStartRow()) {
startRow = builder.getStartRow().toByteArray();
}

View File

@ -31,6 +31,7 @@ import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
import org.apache.hadoop.hbase.util.Bytes;
@ -748,7 +749,7 @@ public class StorageClusterStatusModel
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
StorageClusterStatus.Builder builder = StorageClusterStatus.newBuilder();
builder.mergeFrom(message);
ProtobufUtil.mergeFrom(builder, message);
if (builder.hasRegions()) {
regions = builder.getRegions();
}

View File

@ -30,6 +30,7 @@ import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo;
@ -146,7 +147,7 @@ public class TableInfoModel implements Serializable, ProtobufMessageHandler {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
TableInfo.Builder builder = TableInfo.newBuilder();
builder.mergeFrom(message);
ProtobufUtil.mergeFrom(builder, message);
setName(builder.getName());
for (TableInfo.Region region: builder.getRegionsList()) {
add(new TableRegionModel(builder.getName(), region.getId(),

View File

@ -28,6 +28,7 @@ import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList;
@ -104,7 +105,7 @@ public class TableListModel implements Serializable, ProtobufMessageHandler {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
TableList.Builder builder = TableList.newBuilder();
builder.mergeFrom(message);
ProtobufUtil.mergeFrom(builder, message);
for (String table: builder.getNameList()) {
this.add(new TableModel(table));
}

View File

@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema;
@ -309,7 +310,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
TableSchema.Builder builder = TableSchema.newBuilder();
builder.mergeFrom(message);
ProtobufUtil.mergeFrom(builder, message);
this.setName(builder.getName());
for (TableSchema.Attribute attr: builder.getAttrsList()) {
this.addAttribute(attr.getName(), attr.getValue());

View File

@ -27,6 +27,7 @@ import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.RESTServlet;
import org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version;
@ -188,7 +189,7 @@ public class VersionModel implements Serializable, ProtobufMessageHandler {
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
Version.Builder builder = Version.newBuilder();
builder.mergeFrom(message);
ProtobufUtil.mergeFrom(builder, message);
if (builder.hasRestVersion()) {
restVersion = builder.getRestVersion();
}

View File

@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@ -25,8 +27,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
import org.apache.hadoop.hbase.util.Bytes;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* State of a WAL log split during distributed splitting. State is kept up in zookeeper.
* Encapsulates protobuf serialization/deserialization so we don't leak generated pb outside of
@ -160,10 +160,10 @@ public class SplitLogTask {
ProtobufUtil.expectPBMagicPrefix(data);
try {
int prefixLen = ProtobufUtil.lengthOfPBMagic();
ZooKeeperProtos.SplitLogTask slt = ZooKeeperProtos.SplitLogTask.newBuilder().
mergeFrom(data, prefixLen, data.length - prefixLen).build();
return new SplitLogTask(slt);
} catch (InvalidProtocolBufferException e) {
ZooKeeperProtos.SplitLogTask.Builder builder = ZooKeeperProtos.SplitLogTask.newBuilder();
ProtobufUtil.mergeFrom(builder, data, prefixLen, data.length - prefixLen);
return new SplitLogTask(builder.build());
} catch (IOException e) {
throw new DeserializationException(Bytes.toStringBinary(data, 0, 64), e);
}
}

View File

@ -131,7 +131,6 @@ import com.google.protobuf.BlockingService;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.Descriptors.MethodDescriptor;
import com.google.protobuf.Message;
import com.google.protobuf.Message.Builder;
import com.google.protobuf.ServiceException;
import com.google.protobuf.TextFormat;
@ -1778,7 +1777,9 @@ public class RpcServer implements RpcServerInterface {
CodedInputStream cis = CodedInputStream.newInstance(buf, offset, buf.length);
int headerSize = cis.readRawVarint32();
offset = cis.getTotalBytesRead();
RequestHeader header = RequestHeader.newBuilder().mergeFrom(buf, offset, headerSize).build();
Message.Builder builder = RequestHeader.newBuilder();
ProtobufUtil.mergeFrom(builder, buf, offset, headerSize);
RequestHeader header = (RequestHeader) builder.build();
offset += headerSize;
int id = header.getCallId();
if (LOG.isTraceEnabled()) {
@ -1806,13 +1807,14 @@ public class RpcServer implements RpcServerInterface {
if (header.hasRequestParam() && header.getRequestParam()) {
md = this.service.getDescriptorForType().findMethodByName(header.getMethodName());
if (md == null) throw new UnsupportedOperationException(header.getMethodName());
Builder builder = this.service.getRequestPrototype(md).newBuilderForType();
builder = this.service.getRequestPrototype(md).newBuilderForType();
// To read the varint, I need an inputstream; might as well be a CIS.
cis = CodedInputStream.newInstance(buf, offset, buf.length);
int paramSize = cis.readRawVarint32();
offset += cis.getTotalBytesRead();
if (builder != null) {
param = builder.mergeFrom(buf, offset, paramSize).build();
ProtobufUtil.mergeFrom(builder, buf, offset, paramSize);
param = builder.build();
}
offset += paramSize;
}

View File

@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.Serialization;
@ -57,7 +57,9 @@ public class MutationSerialization implements Serialization<Mutation> {
@Override
public Mutation deserialize(Mutation mutation) throws IOException {
MutationProto proto = MutationProto.parseDelimitedFrom(in);
ClientProtos.MutationProto.Builder builder = ClientProtos.MutationProto.newBuilder();
ProtobufUtil.mergeDelimitedFrom(builder, in);
ClientProtos.MutationProto proto = builder.build();
return ProtobufUtil.toMutation(proto);
}

View File

@ -125,7 +125,9 @@ public class ResultSerialization extends Configured implements Serialization<Res
@Override
public Result deserialize(Result mutation) throws IOException {
ClientProtos.Result proto = ClientProtos.Result.parseDelimitedFrom(in);
ClientProtos.Result.Builder builder = ClientProtos.Result.newBuilder();
ProtobufUtil.mergeDelimitedFrom(builder, in);
ClientProtos.Result proto = builder.build();
return ProtobufUtil.toResult(proto);
}
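Both Hadoop serializations get the same reader-side swap: the static parseDelimitedFrom(in), which enforces the 64MB default, becomes a builder plus ProtobufUtil.mergeDelimitedFrom. A sketch of the resulting round trip (the out/in streams are assumed, as in the Serializer/Deserializer open() contract):

proto.writeDelimitedTo(out);                   // writer side is unchanged
ClientProtos.Result.Builder builder = ClientProtos.Result.newBuilder();
ProtobufUtil.mergeDelimitedFrom(builder, in);  // reads the varint length, then the message
Result result = ProtobufUtil.toResult(builder.build());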

View File

@ -628,8 +628,9 @@ public class MasterRpcServices extends RSRpcServices
}
//invoke the method
Message execRequest = service.getRequestPrototype(methodDesc).newBuilderForType()
.mergeFrom(call.getRequest()).build();
Message.Builder builderForType = service.getRequestPrototype(methodDesc).newBuilderForType();
ProtobufUtil.mergeFrom(builderForType, call.getRequest());
Message execRequest = builderForType.build();
final Message.Builder responseBuilder =
service.getResponsePrototype(methodDesc).newBuilderForType();
service.callMethod(methodDesc, execController, execRequest, new RpcCallback<Message>() {

View File

@ -41,8 +41,6 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.hbase.zookeeper.lock.ZKInterProcessReadWriteLock;
import org.apache.zookeeper.KeeperException;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* A manager for distributed table level locks.
*/
@ -213,10 +211,10 @@ public abstract class TableLockManager {
return null;
}
try {
ZooKeeperProtos.TableLock data = ZooKeeperProtos.TableLock.newBuilder().mergeFrom(
bytes, pblen, bytes.length - pblen).build();
return data;
} catch (InvalidProtocolBufferException ex) {
ZooKeeperProtos.TableLock.Builder builder = ZooKeeperProtos.TableLock.newBuilder();
ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
return builder.build();
} catch (IOException ex) {
LOG.warn("Exception in deserialization", ex);
}
return null;

View File

@ -7453,8 +7453,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
" in region "+Bytes.toStringBinary(getRegionInfo().getRegionName()));
}
Message request = service.getRequestPrototype(methodDesc).newBuilderForType()
.mergeFrom(call.getRequest()).build();
Message.Builder builder = service.getRequestPrototype(methodDesc).newBuilderForType();
ProtobufUtil.mergeFrom(builder, call.getRequest());
Message request = builder.build();
if (coprocessorHost != null) {
request = coprocessorHost.preEndpointInvocation(service, methodName, request);

View File

@ -3235,9 +3235,9 @@ public class HRegionServer extends HasThread implements
throw new UnknownProtocolException(service.getClass(), "Unknown method " + methodName
+ " called on service " + serviceName);
}
Message request =
service.getRequestPrototype(methodDesc).newBuilderForType().mergeFrom(call.getRequest())
.build();
Message.Builder builderForType = service.getRequestPrototype(methodDesc).newBuilderForType();
ProtobufUtil.mergeFrom(builderForType, call.getRequest());
Message request = builderForType.build();
final Message.Builder responseBuilder =
service.getResponsePrototype(methodDesc).newBuilderForType();
service.callMethod(methodDesc, serviceController, request, new RpcCallback<Message>() {

View File

@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.regionserver.wal;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
@ -35,8 +34,9 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.hbase.codec.Codec;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.io.LimitInputStream;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.Builder;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey;
@ -45,7 +45,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL.Entry;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* A Protobuf based WAL has the following structure:
@ -330,9 +329,9 @@ public class ProtobufLogReader extends ReaderBase {
"inputStream.available()= " + this.inputStream.available() + ", " +
"entry size= " + size);
}
final InputStream limitedInput = new LimitInputStream(this.inputStream, size);
builder.mergeFrom(limitedInput);
} catch (InvalidProtocolBufferException ipbe) {
ProtobufUtil.mergeFrom(builder, new LimitInputStream(this.inputStream, size),
(int)size);
} catch (IOException ipbe) {
throw (EOFException) new EOFException("Invalid PB, EOF? Ignoring; originalPosition=" +
originalPosition + ", currentPosition=" + this.inputStream.getPos() +
", messageSize=" + size + ", currentAvailable=" + available).initCause(ipbe);

View File

@ -72,7 +72,6 @@ import org.apache.hadoop.io.Text;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* Maintains lists of permission grants to users and groups to allow for
@ -594,11 +593,11 @@ public class AccessControlLists {
if (ProtobufUtil.isPBMagicPrefix(data)) {
int pblen = ProtobufUtil.lengthOfPBMagic();
try {
AccessControlProtos.UsersAndPermissions perms =
AccessControlProtos.UsersAndPermissions.newBuilder().mergeFrom(
data, pblen, data.length - pblen).build();
return ProtobufUtil.toUserTablePermissions(perms);
} catch (InvalidProtocolBufferException e) {
AccessControlProtos.UsersAndPermissions.Builder builder =
AccessControlProtos.UsersAndPermissions.newBuilder();
ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
return ProtobufUtil.toUserTablePermissions(builder.build());
} catch (IOException e) {
throw new DeserializationException(e);
}
} else {
@ -665,9 +664,13 @@ public class AccessControlLists {
Tag tag = tagsIterator.next();
if (tag.getType() == ACL_TAG_TYPE) {
// Deserialize the table permissions from the KV
ListMultimap<String,Permission> kvPerms = ProtobufUtil.toUsersAndPermissions(
AccessControlProtos.UsersAndPermissions.newBuilder().mergeFrom(
tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()).build());
// TODO: This can be improved. Don't build UsersAndPermissions just to unpack it again,
// use the builder
AccessControlProtos.UsersAndPermissions.Builder builder =
AccessControlProtos.UsersAndPermissions.newBuilder();
ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), tag.getTagLength());
ListMultimap<String,Permission> kvPerms =
ProtobufUtil.toUsersAndPermissions(builder.build());
// Are there permissions for this user?
List<Permission> userPerms = kvPerms.get(user.getShortName());
if (userPerms != null) {

View File

@ -1969,9 +1969,13 @@ public class AccessController extends BaseMasterAndRegionObserver
tags.add(tag);
} else {
// Merge the perms from the older ACL into the current permission set
ListMultimap<String,Permission> kvPerms = ProtobufUtil.toUsersAndPermissions(
AccessControlProtos.UsersAndPermissions.newBuilder().mergeFrom(
tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()).build());
// TODO: The efficiency of this can be improved. Don't build just to unpack
// again, use the builder
AccessControlProtos.UsersAndPermissions.Builder builder =
AccessControlProtos.UsersAndPermissions.newBuilder();
ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), tag.getTagLength());
ListMultimap<String,Permission> kvPerms =
ProtobufUtil.toUsersAndPermissions(builder.build());
perms.putAll(kvPerms);
}
}

View File

@ -689,7 +689,8 @@ class HbaseObjectWritableFor96Migration implements Writable, WritableWithSize, C
byte [] scanBytes = new byte[length];
in.readFully(scanBytes);
ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder();
instance = ProtobufUtil.toScan(scanProto.mergeFrom(scanBytes).build());
ProtobufUtil.mergeFrom(scanProto, scanBytes);
instance = ProtobufUtil.toScan(scanProto.build());
} else { // Writable or Serializable
Class instanceClass = null;
int b = (byte)WritableUtils.readVInt(in);

View File

@ -63,8 +63,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
import org.apache.hadoop.util.ReflectionUtils;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* Utility method to support visibility
*/
@ -131,10 +129,10 @@ public class VisibilityUtils {
if (ProtobufUtil.isPBMagicPrefix(data)) {
int pblen = ProtobufUtil.lengthOfPBMagic();
try {
VisibilityLabelsRequest request = VisibilityLabelsRequest.newBuilder()
.mergeFrom(data, pblen, data.length - pblen).build();
return request.getVisLabelList();
} catch (InvalidProtocolBufferException e) {
VisibilityLabelsRequest.Builder builder = VisibilityLabelsRequest.newBuilder();
ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
return builder.getVisLabelList();
} catch (IOException e) {
throw new DeserializationException(e);
}
}
@ -152,10 +150,10 @@ public class VisibilityUtils {
if (ProtobufUtil.isPBMagicPrefix(data)) {
int pblen = ProtobufUtil.lengthOfPBMagic();
try {
MultiUserAuthorizations multiUserAuths = MultiUserAuthorizations.newBuilder()
.mergeFrom(data, pblen, data.length - pblen).build();
return multiUserAuths;
} catch (InvalidProtocolBufferException e) {
MultiUserAuthorizations.Builder builder = MultiUserAuthorizations.newBuilder();
ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
return builder.build();
} catch (IOException e) {
throw new DeserializationException(e);
}
}

View File

@ -82,7 +82,6 @@ import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import com.google.common.primitives.Ints;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* Utility methods for interacting with the underlying file system.
@ -605,11 +604,10 @@ public abstract class FSUtils {
int pblen = ProtobufUtil.lengthOfPBMagic();
FSProtos.HBaseVersionFileContent.Builder builder =
FSProtos.HBaseVersionFileContent.newBuilder();
FSProtos.HBaseVersionFileContent fileContent;
try {
fileContent = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
return fileContent.getVersion();
} catch (InvalidProtocolBufferException e) {
ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
return builder.getVersion();
} catch (IOException e) {
// Convert
throw new DeserializationException(e);
}

View File

@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase.zookeeper;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@ -27,8 +29,6 @@ import org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.KeeperException;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* Tracks the load balancer state up in ZK
*/
@ -85,8 +85,8 @@ public class LoadBalancerTracker extends ZooKeeperNodeTracker {
LoadBalancerProtos.LoadBalancerState.newBuilder();
try {
int magicLen = ProtobufUtil.lengthOfPBMagic();
builder.mergeFrom(pbBytes, magicLen, pbBytes.length - magicLen);
} catch (InvalidProtocolBufferException e) {
ProtobufUtil.mergeFrom(builder, pbBytes, magicLen, pbBytes.length - magicLen);
} catch (IOException e) {
throw new DeserializationException(e);
}
return builder.build();

View File

@ -87,7 +87,7 @@ public class RegionServerTracker extends ZooKeeperListener {
byte[] data = ZKUtil.getData(watcher, nodePath);
if (data != null && data.length > 0 && ProtobufUtil.isPBMagicPrefix(data)) {
int magicLen = ProtobufUtil.lengthOfPBMagic();
rsInfoBuilder.mergeFrom(data, magicLen, data.length - magicLen);
ProtobufUtil.mergeFrom(rsInfoBuilder, data, magicLen, data.length - magicLen);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Added tracking of RS " + nodePath);

View File

@ -18,6 +18,7 @@
*/
package org.apache.hadoop.hbase.zookeeper;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.Arrays;
import java.util.HashMap;
@ -37,8 +38,6 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.zookeeper.KeeperException;
import com.google.protobuf.InvalidProtocolBufferException;
/**
* Implementation of TableStateManager which reads, caches and sets state
* up in ZooKeeper. If multiple read/write clients, will make for confusion.
@ -348,9 +347,9 @@ public class ZKTableStateManager implements TableStateManager {
ProtobufUtil.expectPBMagicPrefix(data);
ZooKeeperProtos.Table.Builder builder = ZooKeeperProtos.Table.newBuilder();
int magicLen = ProtobufUtil.lengthOfPBMagic();
ZooKeeperProtos.Table t = builder.mergeFrom(data, magicLen, data.length - magicLen).build();
return t.getState();
} catch (InvalidProtocolBufferException e) {
ProtobufUtil.mergeFrom(builder, data, magicLen, data.length - magicLen);
return builder.getState();
} catch (IOException e) {
KeeperException ke = new KeeperException.DataInconsistencyException();
ke.initCause(e);
throw ke;

View File

@ -0,0 +1,115 @@
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.mapreduce;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.experimental.categories.Category;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.mapreduce.MutationSerialization;
import org.apache.hadoop.hbase.mapreduce.ResultSerialization;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.Serializer;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@Category(SmallTests.class)
public class TestSerialization {
@Rule public TestName name = new TestName();
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static Configuration conf;
private static final byte [] row = Bytes.toBytes("row1");
private static final byte [] qualifier = Bytes.toBytes("qualifier1");
private static final byte [] family = Bytes.toBytes("family1");
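// 100MB payload, larger than the hard-coded 64MB limit the ProtobufUtil helpers avoid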
private static final byte [] value = new byte[100 * 1024 * 1024];
@BeforeClass
public static void setUpBeforeClass() throws Exception {
conf = TEST_UTIL.getConfiguration();
conf.setInt("hbase.client.keyvalue.maxsize", Integer.MAX_VALUE);
}
@Test
public void testLargeMutation()
throws Exception {
Put put = new Put(row);
put.addColumn(family, qualifier, value);
MutationSerialization serialization = new MutationSerialization();
Serializer<Mutation> serializer = serialization.getSerializer(Mutation.class);
Deserializer<Mutation> deserializer = serialization.getDeserializer(Mutation.class);
ByteArrayOutputStream os = new ByteArrayOutputStream();
ByteArrayInputStream is = null;
try {
serializer.open(os);
serializer.serialize(put);
os.flush();
is = new ByteArrayInputStream(os.toByteArray());
deserializer.open(is);
deserializer.deserialize(null);
} catch (InvalidProtocolBufferException e) {
assertTrue("Got InvalidProtocolBufferException in " + name.getMethodName(),
e.getCause() instanceof InvalidProtocolBufferException);
} catch (Exception e) {
fail("Got an invalid exception: " + e);
}
}
@Test
public void testLargeResult()
throws Exception {
Result res = Result.create(new KeyValue[] {new KeyValue(row, family, qualifier, 0L, value)});
ResultSerialization serialization = new ResultSerialization();
Serializer<Result> serializer = serialization.getSerializer(Result.class);
Deserializer<Result> deserializer = serialization.getDeserializer(Result.class);
ByteArrayOutputStream os = new ByteArrayOutputStream();
ByteArrayInputStream is = null;
try {
serializer.open(os);
serializer.serialize(res);
os.flush();
is = new ByteArrayInputStream(os.toByteArray());
deserializer.open(is);
deserializer.deserialize(null);
} catch (InvalidProtocolBufferException e) {
assertTrue("Got InvalidProtocolBufferException in " + name.getMethodName(),
e.getCause() instanceof InvalidProtocolBufferException);
} catch (Exception e) {
fail("Got an invalid exception: " + e);
}
}
}