HBASE-13825 Use ProtobufUtil#mergeFrom and ProtobufUtil#mergeDelimitedFrom in place of builder methods of same name
Incorporates HBASE-14076
parent 51061f08a3
commit b194052ec0
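The change applies one mechanical pattern across the code base: instead of calling mergeFrom or mergeDelimitedFrom directly on a generated protobuf Builder (which is subject to protobuf's default 64MB decode limit and throws InvalidProtocolBufferException), callers hand the builder to the new ProtobufUtil helpers, which drive the merge through a CodedInputStream with a raised size limit and report failures as IOException. A minimal before/after sketch, adapted from the ClusterId hunk below (the method signature shown here is illustrative, not the exact one in the source):

  public static ClusterId parseFrom(final byte[] bytes) throws DeserializationException {
    int pblen = ProtobufUtil.lengthOfPBMagic();
    ClusterIdProtos.ClusterId.Builder builder = ClusterIdProtos.ClusterId.newBuilder();
    ClusterIdProtos.ClusterId cid = null;
    try {
      // was: cid = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
      cid = builder.build();
    } catch (IOException e) { // was: catch (InvalidProtocolBufferException e)
      throw new DeserializationException(e);
    }
    return convert(cid);
  }
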
@@ -18,13 +18,13 @@
 package org.apache.hadoop.hbase;

-import com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos;
 import org.apache.hadoop.hbase.util.Bytes;

+import java.io.IOException;
 import java.util.UUID;

 /**
@@ -66,8 +66,9 @@ public class ClusterId {
     ClusterIdProtos.ClusterId.Builder builder = ClusterIdProtos.ClusterId.newBuilder();
     ClusterIdProtos.ClusterId cid = null;
     try {
-      cid = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      cid = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return convert(cid);

@@ -46,7 +46,6 @@ import org.apache.hadoop.io.WritableComparable;

 import com.google.common.base.Preconditions;
 import org.apache.hadoop.hbase.util.ByteStringer;
-import com.google.protobuf.InvalidProtocolBufferException;

 /**
  * An HColumnDescriptor contains information about a column family such as the
@@ -1420,8 +1419,9 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder();
     ColumnFamilySchema cfs = null;
     try {
-      cfs = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      cfs = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return convert(cfs);

@@ -48,8 +48,6 @@ import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.PairOfSameType;
 import org.apache.hadoop.io.DataInputBuffer;

-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * Information about a region. A region is a range of keys in the whole keyspace of a table, an
  * identifier (a timestamp) for differentiating between subset ranges (after region split)
@@ -1118,11 +1116,11 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
     if (ProtobufUtil.isPBMagicPrefix(bytes, offset, len)) {
       int pblen = ProtobufUtil.lengthOfPBMagic();
       try {
-        HBaseProtos.RegionInfo ri =
-          HBaseProtos.RegionInfo.newBuilder().
-            mergeFrom(bytes, pblen + offset, len - pblen).build();
+        HBaseProtos.RegionInfo.Builder builder = HBaseProtos.RegionInfo.newBuilder();
+        ProtobufUtil.mergeFrom(builder, bytes, pblen + offset, len - pblen);
+        HBaseProtos.RegionInfo ri = builder.build();
         return convert(ri);
-      } catch (InvalidProtocolBufferException e) {
+      } catch (IOException e) {
         throw new DeserializationException(e);
       }
     } else {

@@ -56,8 +56,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.WritableComparable;

-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * HTableDescriptor contains the details about an HBase table such as the descriptors of
  * all the column families, is the table a catalog table, <code> -ROOT- </code> or
@@ -1585,8 +1583,9 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
     TableSchema.Builder builder = TableSchema.newBuilder();
     TableSchema ts;
     try {
-      ts = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      ts = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return convert(ts);

@@ -17,8 +17,9 @@
  */
 package org.apache.hadoop.hbase;

+import java.io.IOException;
+
 import org.apache.hadoop.hbase.util.ByteStringer;
-import com.google.protobuf.InvalidProtocolBufferException;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -121,10 +122,11 @@ public class RegionTransition {
     ProtobufUtil.expectPBMagicPrefix(data);
     try {
       int prefixLen = ProtobufUtil.lengthOfPBMagic();
-      ZooKeeperProtos.RegionTransition rt = ZooKeeperProtos.RegionTransition.newBuilder().
-        mergeFrom(data, prefixLen, data.length - prefixLen).build();
-      return new RegionTransition(rt);
-    } catch (InvalidProtocolBufferException e) {
+      ZooKeeperProtos.RegionTransition.Builder builder =
+        ZooKeeperProtos.RegionTransition.newBuilder();
+      ProtobufUtil.mergeFrom(builder, data, prefixLen, data.length - prefixLen);
+      return new RegionTransition(builder.build());
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
   }

@@ -75,7 +75,6 @@ import org.apache.hadoop.hbase.util.Threads;

 import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.Descriptors;
-import com.google.protobuf.InvalidProtocolBufferException;
 import com.google.protobuf.Message;
 import com.google.protobuf.Service;
 import com.google.protobuf.ServiceException;
@@ -1884,10 +1883,10 @@ public class HTable implements HTableInterface, RegionLocator {
               ", value=" + serviceResult.getValue().getValue());
         }
         try {
-          callback.update(region, row,
-              (R) responsePrototype.newBuilderForType().mergeFrom(
-                  serviceResult.getValue().getValue()).build());
-        } catch (InvalidProtocolBufferException e) {
+          Message.Builder builder = responsePrototype.newBuilderForType();
+          ProtobufUtil.mergeFrom(builder, serviceResult.getValue().getValue());
+          callback.update(region, row, (R) builder.build());
+        } catch (IOException e) {
           LOG.error("Unexpected response type from endpoint " + methodDescriptor.getFullName(),
               e);
           callbackErrorExceptions.add(e);

@@ -28,6 +28,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
 import org.apache.hadoop.ipc.RemoteException;

@@ -90,7 +91,7 @@ public class AsyncServerResponseHandler extends ChannelInboundHandlerAdapter {
       // Call may be null because it may have timedout and been cleaned up on this side already
       if (call.responseDefaultType != null) {
         Message.Builder builder = call.responseDefaultType.newBuilderForType();
-        builder.mergeDelimitedFrom(in);
+        ProtobufUtil.mergeDelimitedFrom(builder, in);
         value = builder.build();
       }
       CellScanner cellBlockScanner = null;

@@ -68,8 +68,9 @@ public class MasterCoprocessorRpcChannel extends CoprocessorRpcChannel{
     CoprocessorServiceResponse result = ProtobufUtil.execService(connection.getMaster(), call);
     Message response = null;
     if (result.getValue().hasValue()) {
-      response = responsePrototype.newBuilderForType()
-          .mergeFrom(result.getValue().getValue()).build();
+      Message.Builder builder = responsePrototype.newBuilderForType();
+      ProtobufUtil.mergeFrom(builder, result.getValue().getValue());
+      response = builder.build();
     } else {
       response = responsePrototype.getDefaultInstanceForType();
     }

@@ -96,8 +96,9 @@ public class RegionCoprocessorRpcChannel extends CoprocessorRpcChannel{
         .callWithRetries(callable, operationTimeout);
     Message response = null;
     if (result.getValue().hasValue()) {
-      response = responsePrototype.newBuilderForType()
-          .mergeFrom(result.getValue().getValue()).build();
+      Message.Builder builder = responsePrototype.newBuilderForType();
+      ProtobufUtil.mergeFrom(builder, result.getValue().getValue());
+      response = builder.build();
     } else {
       response = responsePrototype.getDefaultInstanceForType();
     }

@@ -62,8 +62,9 @@ public class RegionServerCoprocessorRpcChannel extends CoprocessorRpcChannel {
         ProtobufUtil.execRegionServerService(connection.getClient(serverName), call);
     Message response = null;
     if (result.getValue().hasValue()) {
-      response =
-          responsePrototype.newBuilderForType().mergeFrom(result.getValue().getValue()).build();
+      Message.Builder builder = responsePrototype.newBuilderForType();
+      ProtobufUtil.mergeFrom(builder, result.getValue().getValue());
+      response = builder.build();
     } else {
       response = responsePrototype.getDefaultInstanceForType();
     }

@@ -959,7 +959,7 @@ public class RpcClientImpl extends AbstractRpcClient {
       Message value = null;
       if (call.responseDefaultType != null) {
         Builder builder = call.responseDefaultType.newBuilderForType();
-        builder.mergeDelimitedFrom(in);
+        ProtobufUtil.mergeDelimitedFrom(builder, in);
         value = builder.build();
       }
       CellScanner cellBlockScanner = null;

@@ -22,6 +22,7 @@ import static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpeci

 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
@@ -67,6 +68,7 @@ import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.io.LimitInputStream;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
 import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
@@ -155,6 +157,7 @@ import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
 import com.google.protobuf.ByteString;
+import com.google.protobuf.CodedInputStream;
 import com.google.protobuf.InvalidProtocolBufferException;
 import com.google.protobuf.Message;
 import com.google.protobuf.Parser;
@@ -2765,8 +2768,9 @@ public final class ProtobufUtil {
     ClientProtos.CellVisibility.Builder builder = ClientProtos.CellVisibility.newBuilder();
     ClientProtos.CellVisibility proto = null;
     try {
-      proto = builder.mergeFrom(protoBytes).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, protoBytes);
+      proto = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return toCellVisibility(proto);
@@ -2807,8 +2811,9 @@ public final class ProtobufUtil {
     ClientProtos.Authorizations.Builder builder = ClientProtos.Authorizations.newBuilder();
     ClientProtos.Authorizations proto = null;
     try {
-      proto = builder.mergeFrom(protoBytes).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, protoBytes);
+      proto = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return toAuthorizations(proto);
@@ -3070,6 +3075,104 @@ public final class ProtobufUtil {
     return desc.build();
   }

+  /**
+   * This version of protobuf's mergeDelimitedFrom avoids the hard-coded 64MB limit for decoding
+   * buffers
+   * @param builder current message builder
+   * @param in InputStream with delimited protobuf data
+   * @throws IOException
+   */
+  public static void mergeDelimitedFrom(Message.Builder builder, InputStream in)
+    throws IOException {
+    // This used to be builder.mergeDelimitedFrom(in);
+    // but is replaced to allow us to bump the protobuf size limit.
+    final int firstByte = in.read();
+    if (firstByte != -1) {
+      final int size = CodedInputStream.readRawVarint32(firstByte, in);
+      final InputStream limitedInput = new LimitInputStream(in, size);
+      final CodedInputStream codedInput = CodedInputStream.newInstance(limitedInput);
+      codedInput.setSizeLimit(size);
+      builder.mergeFrom(codedInput);
+      codedInput.checkLastTagWas(0);
+    }
+  }
+
+  /**
+   * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+   * buffers where the message size is known
+   * @param builder current message builder
+   * @param in InputStream containing protobuf data
+   * @param size known size of protobuf data
+   * @throws IOException
+   */
+  public static void mergeFrom(Message.Builder builder, InputStream in, int size)
+    throws IOException {
+    final CodedInputStream codedInput = CodedInputStream.newInstance(in);
+    codedInput.setSizeLimit(size);
+    builder.mergeFrom(codedInput);
+    codedInput.checkLastTagWas(0);
+  }
+
+  /**
+   * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+   * buffers where the message size is not known
+   * @param builder current message builder
+   * @param in InputStream containing protobuf data
+   * @throws IOException
+   */
+  public static void mergeFrom(Message.Builder builder, InputStream in)
+    throws IOException {
+    final CodedInputStream codedInput = CodedInputStream.newInstance(in);
+    codedInput.setSizeLimit(Integer.MAX_VALUE);
+    builder.mergeFrom(codedInput);
+    codedInput.checkLastTagWas(0);
+  }
+
+  /**
+   * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+   * buffers when working with ByteStrings
+   * @param builder current message builder
+   * @param bs ByteString containing the protobuf data
+   * @throws IOException
+   */
+  public static void mergeFrom(Message.Builder builder, ByteString bs) throws IOException {
+    final CodedInputStream codedInput = bs.newCodedInput();
+    codedInput.setSizeLimit(bs.size());
+    builder.mergeFrom(codedInput);
+    codedInput.checkLastTagWas(0);
+  }
+
+  /**
+   * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+   * buffers when working with byte arrays
+   * @param builder current message builder
+   * @param b byte array
+   * @throws IOException
+   */
+  public static void mergeFrom(Message.Builder builder, byte[] b) throws IOException {
+    final CodedInputStream codedInput = CodedInputStream.newInstance(b);
+    codedInput.setSizeLimit(b.length);
+    builder.mergeFrom(codedInput);
+    codedInput.checkLastTagWas(0);
+  }
+
+  /**
+   * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
+   * buffers when working with byte arrays
+   * @param builder current message builder
+   * @param b byte array
+   * @param offset
+   * @param length
+   * @throws IOException
+   */
+  public static void mergeFrom(Message.Builder builder, byte[] b, int offset, int length)
+    throws IOException {
+    final CodedInputStream codedInput = CodedInputStream.newInstance(b, offset, length);
+    codedInput.setSizeLimit(length);
+    builder.mergeFrom(codedInput);
+    codedInput.checkLastTagWas(0);
+  }
+
   public static ReplicationLoadSink toReplicationLoadSink(
       ClusterStatusProtos.ReplicationLoadSink cls) {
     return new ReplicationLoadSink(cls.getAgeOfLastAppliedOp(), cls.getTimeStampsOfLastAppliedOp());

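All of the new ProtobufUtil helpers above share one shape: obtain a CodedInputStream over the input, raise its size limit to the known payload size (or Integer.MAX_VALUE when the size is not known) instead of protobuf's 64MB default, merge into the caller's builder, and finish with checkLastTagWas(0) to confirm the message was consumed cleanly. A hypothetical caller deserializing a large serialized TableSchema might use them roughly as follows (the variable names and surrounding error handling are illustrative, not taken from a file in this diff):

  TableSchema.Builder builder = TableSchema.newBuilder();
  try {
    ProtobufUtil.mergeFrom(builder, schemaBytes); // size limit becomes schemaBytes.length, not 64MB
    TableSchema ts = builder.build();
  } catch (IOException e) {
    throw new DeserializationException(e);
  }
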
@@ -41,8 +41,6 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.KeeperException.NodeExistsException;

-import com.google.protobuf.InvalidProtocolBufferException;
-
 @InterfaceAudience.Private
 public class ReplicationPeerZKImpl implements ReplicationPeer, Abortable, Closeable {
   private static final Log LOG = LogFactory.getLog(ReplicationPeerZKImpl.class);
@@ -199,9 +197,10 @@ public class ReplicationPeerZKImpl implements ReplicationPeer, Abortable, Closea
         ZooKeeperProtos.ReplicationState.newBuilder();
     ZooKeeperProtos.ReplicationState state;
     try {
-      state = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      state = builder.build();
       return state.getState();
-    } catch (InvalidProtocolBufferException e) {
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
   }

@@ -48,7 +48,6 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;
 import org.apache.zookeeper.KeeperException;

 import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;

 /**
  * This class provides an implementation of the ReplicationPeers interface using Zookeeper. The
@@ -492,8 +491,9 @@ public class ReplicationPeersZKImpl extends ReplicationStateZKBase implements Re
         ZooKeeperProtos.ReplicationPeer.newBuilder();
     ZooKeeperProtos.ReplicationPeer peer;
     try {
-      peer = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      peer = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return convert(peer);

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security.token;

 import com.google.protobuf.ByteString;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -136,8 +137,10 @@ public class AuthenticationTokenIdentifier extends TokenIdentifier {
     int len = in.readInt();
     byte[] inBytes = new byte[len];
     in.readFully(inBytes);
-    AuthenticationProtos.TokenIdentifier identifier =
-        AuthenticationProtos.TokenIdentifier.newBuilder().mergeFrom(inBytes).build();
+    AuthenticationProtos.TokenIdentifier.Builder builder =
+        AuthenticationProtos.TokenIdentifier.newBuilder();
+    ProtobufUtil.mergeFrom(builder, inBytes);
+    AuthenticationProtos.TokenIdentifier identifier = builder.build();
     // sanity check on type
     if (!identifier.hasKind() ||
         identifier.getKind() != AuthenticationProtos.TokenIdentifier.Kind.HBASE_AUTH_TOKEN) {

@@ -19,8 +19,6 @@
  */
 package org.apache.hadoop.hbase.zookeeper;

-import com.google.protobuf.InvalidProtocolBufferException;
-
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -28,6 +26,7 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
 import org.apache.zookeeper.KeeperException;

+import java.io.IOException;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -189,9 +188,9 @@ public class ZKTableStateClientSideReader {
       ProtobufUtil.expectPBMagicPrefix(data);
       ZooKeeperProtos.Table.Builder builder = ZooKeeperProtos.Table.newBuilder();
       int magicLen = ProtobufUtil.lengthOfPBMagic();
-      ZooKeeperProtos.Table t = builder.mergeFrom(data, magicLen, data.length - magicLen).build();
-      return t.getState();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, data, magicLen, data.length - magicLen);
+      return builder.getState();
+    } catch (IOException e) {
       KeeperException ke = new KeeperException.DataInconsistencyException();
       ke.initCause(e);
       throw ke;

@@ -1956,12 +1956,14 @@ public class ZKUtil {
       }
       // parse the data of the above peer znode.
       try {
-        String clusterKey = ZooKeeperProtos.ReplicationPeer.newBuilder().
-          mergeFrom(data, pblen, data.length - pblen).getClusterkey();
-        sb.append("\n").append(znodeToProcess).append(": ").append(clusterKey);
-        // add the peer-state.
-        appendPeerState(zkw, znodeToProcess, sb);
-      } catch (InvalidProtocolBufferException ipbe) {
+        ZooKeeperProtos.ReplicationPeer.Builder builder =
+          ZooKeeperProtos.ReplicationPeer.newBuilder();
+        ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+        String clusterKey = builder.getClusterkey();
+        sb.append("\n").append(znodeToProcess).append(": ").append(clusterKey);
+        // add the peer-state.
+        appendPeerState(zkw, znodeToProcess, sb);
+      } catch (IOException ipbe) {
         LOG.warn("Got Exception while parsing peer: " + znodeToProcess, ipbe);
       }
     }
@@ -1979,8 +1981,12 @@ public class ZKUtil {
     byte[] peerStateData;
     try {
       peerStateData = ZKUtil.getData(zkw, peerStateZnode);
-      sb.append(ZooKeeperProtos.ReplicationState.newBuilder()
-        .mergeFrom(peerStateData, pblen, peerStateData.length - pblen).getState().name());
+      ZooKeeperProtos.ReplicationState.Builder builder =
+        ZooKeeperProtos.ReplicationState.newBuilder();
+      ProtobufUtil.mergeFrom(builder, peerStateData, pblen, peerStateData.length - pblen);
+      sb.append(builder.getState().name());
+    } catch (IOException ipbe) {
+      LOG.warn("Got Exception while parsing peer: " + znodeToProcess, ipbe);
     } catch (InterruptedException e) {
       zkw.interruptedException(e);
       return;
@@ -2201,8 +2207,9 @@ public class ZKUtil {
         ZooKeeperProtos.ReplicationHLogPosition.newBuilder();
     ZooKeeperProtos.ReplicationHLogPosition position;
     try {
-      position = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      position = builder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return position.getPosition();
@@ -2257,8 +2264,9 @@ public class ZKUtil {
     int pblen = ProtobufUtil.lengthOfPBMagic();
     RegionStoreSequenceIds storeIds = null;
     try {
-      storeIds = regionSequenceIdsBuilder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(regionSequenceIdsBuilder, bytes, pblen, bytes.length - pblen);
+      storeIds = regionSequenceIdsBuilder.build();
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return storeIds;

@@ -37,6 +37,7 @@ public class PBCell extends PBType<CellProtos.Cell> {
   public int skip(PositionedByteRange src) {
     CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder();
     CodedInputStream is = inputStreamFromByteRange(src);
+    is.setSizeLimit(src.getLength());
     try {
       builder.mergeFrom(is);
       int consumed = is.getTotalBytesRead();
@@ -51,6 +52,7 @@ public class PBCell extends PBType<CellProtos.Cell> {
   public CellProtos.Cell decode(PositionedByteRange src) {
     CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder();
     CodedInputStream is = inputStreamFromByteRange(src);
+    is.setSizeLimit(src.getLength());
     try {
       CellProtos.Cell ret = builder.mergeFrom(is).build();
       src.setPosition(src.getPosition() + is.getTotalBytesRead());

@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
 import org.codehaus.jackson.annotate.JsonProperty;
@@ -198,7 +199,7 @@ public class CellModel implements ProtobufMessageHandler, Serializable {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     Cell.Builder builder = Cell.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     setColumn(builder.getColumn().toByteArray());
     setValue(builder.getData().toByteArray());
     if (builder.hasTimestamp()) {

@@ -32,6 +32,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
 import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet;
@@ -133,7 +134,7 @@ public class CellSetModel implements Serializable, ProtobufMessageHandler {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     CellSet.Builder builder = CellSet.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     for (CellSet.Row row: builder.getRowsList()) {
       RowModel rowModel = new RowModel(row.getKey().toByteArray());
       for (Cell cell: row.getValuesList()) {

@@ -69,6 +69,7 @@ import org.apache.hadoop.hbase.filter.SubstringComparator;
 import org.apache.hadoop.hbase.filter.TimestampsFilter;
 import org.apache.hadoop.hbase.filter.ValueFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner;
 import org.apache.hadoop.hbase.security.visibility.Authorizations;
@@ -827,7 +828,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     Scanner.Builder builder = Scanner.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     if (builder.hasStartRow()) {
       startRow = builder.getStartRow().toByteArray();
     }

@@ -31,6 +31,7 @@ import javax.xml.bind.annotation.XmlRootElement;

 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -748,7 +749,7 @@ public class StorageClusterStatusModel
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     StorageClusterStatus.Builder builder = StorageClusterStatus.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     if (builder.hasRegions()) {
       regions = builder.getRegions();
     }

@@ -30,6 +30,7 @@ import javax.xml.bind.annotation.XmlRootElement;

 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo;

@@ -146,7 +147,7 @@ public class TableInfoModel implements Serializable, ProtobufMessageHandler {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     TableInfo.Builder builder = TableInfo.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     setName(builder.getName());
     for (TableInfo.Region region: builder.getRegionsList()) {
       add(new TableRegionModel(builder.getName(), region.getId(),

@@ -28,6 +28,7 @@ import javax.xml.bind.annotation.XmlElementRef;
 import javax.xml.bind.annotation.XmlRootElement;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList;

@@ -104,7 +105,7 @@ public class TableListModel implements Serializable, ProtobufMessageHandler {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     TableList.Builder builder = TableList.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     for (String table: builder.getNameList()) {
       this.add(new TableModel(table));
     }

@@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
 import org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema;
@@ -309,7 +310,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     TableSchema.Builder builder = TableSchema.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     this.setName(builder.getName());
     for (TableSchema.Attribute attr: builder.getAttrsList()) {
       this.addAttribute(attr.getName(), attr.getValue());

@@ -27,6 +27,7 @@ import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlRootElement;

 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.RESTServlet;
 import org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version;
@@ -188,7 +189,7 @@ public class VersionModel implements Serializable, ProtobufMessageHandler {
   public ProtobufMessageHandler getObjectFromMessage(byte[] message)
       throws IOException {
     Version.Builder builder = Version.newBuilder();
-    builder.mergeFrom(message);
+    ProtobufUtil.mergeFrom(builder, message);
     if (builder.hasRestVersion()) {
       restVersion = builder.getRestVersion();
     }

@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase;

+import java.io.IOException;
+
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -25,8 +27,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
 import org.apache.hadoop.hbase.util.Bytes;

-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * State of a WAL log split during distributed splitting. State is kept up in zookeeper.
  * Encapsulates protobuf serialization/deserialization so we don't leak generated pb outside of
@@ -160,10 +160,10 @@ public class SplitLogTask {
     ProtobufUtil.expectPBMagicPrefix(data);
     try {
       int prefixLen = ProtobufUtil.lengthOfPBMagic();
-      ZooKeeperProtos.SplitLogTask slt = ZooKeeperProtos.SplitLogTask.newBuilder().
-        mergeFrom(data, prefixLen, data.length - prefixLen).build();
-      return new SplitLogTask(slt);
-    } catch (InvalidProtocolBufferException e) {
+      ZooKeeperProtos.SplitLogTask.Builder builder = ZooKeeperProtos.SplitLogTask.newBuilder();
+      ProtobufUtil.mergeFrom(builder, data, prefixLen, data.length - prefixLen);
+      return new SplitLogTask(builder.build());
+    } catch (IOException e) {
       throw new DeserializationException(Bytes.toStringBinary(data, 0, 64), e);
     }
   }

@@ -131,7 +131,6 @@ import com.google.protobuf.BlockingService;
 import com.google.protobuf.CodedInputStream;
 import com.google.protobuf.Descriptors.MethodDescriptor;
 import com.google.protobuf.Message;
-import com.google.protobuf.Message.Builder;
 import com.google.protobuf.ServiceException;
 import com.google.protobuf.TextFormat;

@@ -1778,7 +1777,9 @@ public class RpcServer implements RpcServerInterface {
       CodedInputStream cis = CodedInputStream.newInstance(buf, offset, buf.length);
       int headerSize = cis.readRawVarint32();
       offset = cis.getTotalBytesRead();
-      RequestHeader header = RequestHeader.newBuilder().mergeFrom(buf, offset, headerSize).build();
+      Message.Builder builder = RequestHeader.newBuilder();
+      ProtobufUtil.mergeFrom(builder, buf, offset, headerSize);
+      RequestHeader header = (RequestHeader) builder.build();
       offset += headerSize;
       int id = header.getCallId();
       if (LOG.isTraceEnabled()) {
@@ -1806,13 +1807,14 @@
       if (header.hasRequestParam() && header.getRequestParam()) {
         md = this.service.getDescriptorForType().findMethodByName(header.getMethodName());
         if (md == null) throw new UnsupportedOperationException(header.getMethodName());
-        Builder builder = this.service.getRequestPrototype(md).newBuilderForType();
+        builder = this.service.getRequestPrototype(md).newBuilderForType();
         // To read the varint, I need an inputstream; might as well be a CIS.
         cis = CodedInputStream.newInstance(buf, offset, buf.length);
         int paramSize = cis.readRawVarint32();
         offset += cis.getTotalBytesRead();
         if (builder != null) {
-          param = builder.mergeFrom(buf, offset, paramSize).build();
+          ProtobufUtil.mergeFrom(builder, buf, offset, paramSize);
+          param = builder.build();
         }
         offset += paramSize;
       }

@@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
 import org.apache.hadoop.io.serializer.Deserializer;
 import org.apache.hadoop.io.serializer.Serialization;
@@ -57,7 +57,9 @@ public class MutationSerialization implements Serialization<Mutation> {

     @Override
     public Mutation deserialize(Mutation mutation) throws IOException {
-      MutationProto proto = MutationProto.parseDelimitedFrom(in);
+      ClientProtos.MutationProto.Builder builder = ClientProtos.MutationProto.newBuilder();
+      ProtobufUtil.mergeDelimitedFrom(builder, in);
+      ClientProtos.MutationProto proto = builder.build();
       return ProtobufUtil.toMutation(proto);
     }

@@ -125,7 +125,9 @@ public class ResultSerialization extends Configured implements Serialization<Res

   @Override
   public Result deserialize(Result mutation) throws IOException {
-    ClientProtos.Result proto = ClientProtos.Result.parseDelimitedFrom(in);
+    ClientProtos.Result.Builder builder = ClientProtos.Result.newBuilder();
+    ProtobufUtil.mergeDelimitedFrom(builder, in);
+    ClientProtos.Result proto = builder.build();
     return ProtobufUtil.toResult(proto);
   }

@@ -628,8 +628,9 @@ public class MasterRpcServices extends RSRpcServices
       }

       //invoke the method
-      Message execRequest = service.getRequestPrototype(methodDesc).newBuilderForType()
-          .mergeFrom(call.getRequest()).build();
+      Message.Builder builderForType = service.getRequestPrototype(methodDesc).newBuilderForType();
+      ProtobufUtil.mergeFrom(builderForType, call.getRequest());
+      Message execRequest = builderForType.build();
       final Message.Builder responseBuilder =
           service.getResponsePrototype(methodDesc).newBuilderForType();
       service.callMethod(methodDesc, execController, execRequest, new RpcCallback<Message>() {

@@ -41,8 +41,6 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.hadoop.hbase.zookeeper.lock.ZKInterProcessReadWriteLock;
 import org.apache.zookeeper.KeeperException;
 
-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * A manager for distributed table level locks.
  */
@@ -213,10 +211,10 @@ public abstract class TableLockManager {
       return null;
     }
     try {
-      ZooKeeperProtos.TableLock data = ZooKeeperProtos.TableLock.newBuilder().mergeFrom(
-          bytes, pblen, bytes.length - pblen).build();
-      return data;
-    } catch (InvalidProtocolBufferException ex) {
+      ZooKeeperProtos.TableLock.Builder builder = ZooKeeperProtos.TableLock.newBuilder();
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      return builder.build();
+    } catch (IOException ex) {
       LOG.warn("Exception in deserialization", ex);
     }
     return null;
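Note on the call sites that parse a byte-array slice (this one, AccessControlLists, VisibilityUtils, FSUtils, and the ZooKeeper trackers further down): they all switch to the same pattern of merging into a builder via `ProtobufUtil.mergeFrom(builder, bytes, offset, length)` and catching `IOException`, which is what the helper declares; `InvalidProtocolBufferException` is an `IOException` subclass, so the old failure mode is still covered. A plausible sketch of the byte-array overload, again with the size limit raised to the slice being parsed, under the same assumptions as above and not the verbatim HBase code:

```java
import java.io.IOException;

import com.google.protobuf.CodedInputStream;
import com.google.protobuf.Message;

public final class ByteArrayMergeSketch {
  /** Merges bytes[offset, offset + length) into the builder with a matching size limit. */
  public static void mergeFrom(Message.Builder builder, byte[] b, int offset, int length)
      throws IOException {
    CodedInputStream codedInput = CodedInputStream.newInstance(b, offset, length);
    codedInput.setSizeLimit(length);
    builder.mergeFrom(codedInput);
    codedInput.checkLastTagWas(0);
  }
}
```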
@@ -7453,8 +7453,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Region {
         " in region "+Bytes.toStringBinary(getRegionInfo().getRegionName()));
     }
 
-    Message request = service.getRequestPrototype(methodDesc).newBuilderForType()
-        .mergeFrom(call.getRequest()).build();
+    Message.Builder builder = service.getRequestPrototype(methodDesc).newBuilderForType();
+    ProtobufUtil.mergeFrom(builder, call.getRequest());
+    Message request = builder.build();
 
     if (coprocessorHost != null) {
       request = coprocessorHost.preEndpointInvocation(service, methodName, request);
@@ -3235,9 +3235,9 @@ public class HRegionServer extends HasThread implements
       throw new UnknownProtocolException(service.getClass(), "Unknown method " + methodName
           + " called on service " + serviceName);
     }
-    Message request =
-        service.getRequestPrototype(methodDesc).newBuilderForType().mergeFrom(call.getRequest())
-            .build();
+    Message.Builder builderForType = service.getRequestPrototype(methodDesc).newBuilderForType();
+    ProtobufUtil.mergeFrom(builderForType, call.getRequest());
+    Message request = builderForType.build();
     final Message.Builder responseBuilder =
         service.getResponsePrototype(methodDesc).newBuilderForType();
     service.callMethod(methodDesc, serviceController, request, new RpcCallback<Message>() {
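Note: the three coprocessor-endpoint call sites (MasterRpcServices, HRegion, HRegionServer) pass `call.getRequest()`, which is a protobuf `ByteString`, so a `ByteString` overload of the helper is presumably what is invoked there. A minimal sketch under the same assumptions:

```java
import java.io.IOException;

import com.google.protobuf.ByteString;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.Message;

public final class ByteStringMergeSketch {
  /** Merges a ByteString payload into the builder, limiting the parser to its actual size. */
  public static void mergeFrom(Message.Builder builder, ByteString bs) throws IOException {
    CodedInputStream codedInput = bs.newCodedInput();
    codedInput.setSizeLimit(bs.size());
    builder.mergeFrom(codedInput);
    codedInput.checkLastTagWas(0);
  }
}
```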
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.regionserver.wal;
 
 import java.io.EOFException;
 import java.io.IOException;
-import java.io.InputStream;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -35,8 +34,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.io.LimitInputStream;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.Builder;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey;
@@ -45,7 +45,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.wal.WAL.Entry;
 
 import com.google.protobuf.CodedInputStream;
-import com.google.protobuf.InvalidProtocolBufferException;
 
 /**
  * A Protobuf based WAL has the following structure:
@@ -330,9 +329,9 @@ public class ProtobufLogReader extends ReaderBase {
               "inputStream.available()= " + this.inputStream.available() + ", " +
               "entry size= " + size);
         }
-        final InputStream limitedInput = new LimitInputStream(this.inputStream, size);
-        builder.mergeFrom(limitedInput);
-      } catch (InvalidProtocolBufferException ipbe) {
+        ProtobufUtil.mergeFrom(builder, new LimitInputStream(this.inputStream, size),
+          (int)size);
+      } catch (IOException ipbe) {
         throw (EOFException) new EOFException("Invalid PB, EOF? Ignoring; originalPosition=" +
           originalPosition + ", currentPosition=" + this.inputStream.getPos() +
           ", messageSize=" + size + ", currentAvailable=" + available).initCause(ipbe);
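Note: in the WAL reader the entry size is already known from the preceding length field, so the call hands the helper a `LimitInputStream` plus an explicit `(int) size`, and the parser's limit can be set to exactly that count. An overload along these lines is assumed (sketch only, not the exact implementation):

```java
import java.io.IOException;
import java.io.InputStream;

import com.google.protobuf.CodedInputStream;
import com.google.protobuf.Message;

public final class StreamMergeSketch {
  /** Merges a message of known size from an already length-limited stream. */
  public static void mergeFrom(Message.Builder builder, InputStream in, int size)
      throws IOException {
    CodedInputStream codedInput = CodedInputStream.newInstance(in);
    codedInput.setSizeLimit(size);
    builder.mergeFrom(codedInput);
    codedInput.checkLastTagWas(0);
  }
}
```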
@@ -72,7 +72,6 @@ import org.apache.hadoop.io.Text;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
-import com.google.protobuf.InvalidProtocolBufferException;
 
 /**
  * Maintains lists of permission grants to users and groups to allow for
@@ -594,11 +593,11 @@ public class AccessControlLists {
     if (ProtobufUtil.isPBMagicPrefix(data)) {
       int pblen = ProtobufUtil.lengthOfPBMagic();
       try {
-        AccessControlProtos.UsersAndPermissions perms =
-          AccessControlProtos.UsersAndPermissions.newBuilder().mergeFrom(
-            data, pblen, data.length - pblen).build();
-        return ProtobufUtil.toUserTablePermissions(perms);
-      } catch (InvalidProtocolBufferException e) {
+        AccessControlProtos.UsersAndPermissions.Builder builder =
+          AccessControlProtos.UsersAndPermissions.newBuilder();
+        ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+        return ProtobufUtil.toUserTablePermissions(builder.build());
+      } catch (IOException e) {
         throw new DeserializationException(e);
       }
     } else {
@@ -665,9 +664,13 @@ public class AccessControlLists {
       Tag tag = tagsIterator.next();
       if (tag.getType() == ACL_TAG_TYPE) {
         // Deserialize the table permissions from the KV
-        ListMultimap<String,Permission> kvPerms = ProtobufUtil.toUsersAndPermissions(
-          AccessControlProtos.UsersAndPermissions.newBuilder().mergeFrom(
-            tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()).build());
+        // TODO: This can be improved. Don't build UsersAndPermissions just to unpack it again,
+        // use the builder
+        AccessControlProtos.UsersAndPermissions.Builder builder =
+          AccessControlProtos.UsersAndPermissions.newBuilder();
+        ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), tag.getTagLength());
+        ListMultimap<String,Permission> kvPerms =
+          ProtobufUtil.toUsersAndPermissions(builder.build());
         // Are there permissions for this user?
         List<Permission> userPerms = kvPerms.get(user.getShortName());
         if (userPerms != null) {
@@ -1969,9 +1969,13 @@ public class AccessController extends BaseMasterAndRegionObserver
           tags.add(tag);
         } else {
           // Merge the perms from the older ACL into the current permission set
-          ListMultimap<String,Permission> kvPerms = ProtobufUtil.toUsersAndPermissions(
-            AccessControlProtos.UsersAndPermissions.newBuilder().mergeFrom(
-              tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()).build());
+          // TODO: The efficiency of this can be improved. Don't build just to unpack
+          // again, use the builder
+          AccessControlProtos.UsersAndPermissions.Builder builder =
+            AccessControlProtos.UsersAndPermissions.newBuilder();
+          ProtobufUtil.mergeFrom(builder, tag.getBuffer(), tag.getTagOffset(), tag.getTagLength());
+          ListMultimap<String,Permission> kvPerms =
+            ProtobufUtil.toUsersAndPermissions(builder.build());
           perms.putAll(kvPerms);
         }
       }
@@ -689,7 +689,8 @@ class HbaseObjectWritableFor96Migration implements Writable, WritableWithSize, Configurable {
       byte [] scanBytes = new byte[length];
       in.readFully(scanBytes);
       ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder();
-      instance = ProtobufUtil.toScan(scanProto.mergeFrom(scanBytes).build());
+      ProtobufUtil.mergeFrom(scanProto, scanBytes);
+      instance = ProtobufUtil.toScan(scanProto.build());
     } else { // Writable or Serializable
       Class instanceClass = null;
       int b = (byte)WritableUtils.readVInt(in);
@@ -63,8 +63,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * Utility method to support visibility
  */
@@ -131,10 +129,10 @@ public class VisibilityUtils {
     if (ProtobufUtil.isPBMagicPrefix(data)) {
       int pblen = ProtobufUtil.lengthOfPBMagic();
       try {
-        VisibilityLabelsRequest request = VisibilityLabelsRequest.newBuilder()
-            .mergeFrom(data, pblen, data.length - pblen).build();
-        return request.getVisLabelList();
-      } catch (InvalidProtocolBufferException e) {
+        VisibilityLabelsRequest.Builder builder = VisibilityLabelsRequest.newBuilder();
+        ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+        return builder.getVisLabelList();
+      } catch (IOException e) {
         throw new DeserializationException(e);
       }
     }
@@ -152,10 +150,10 @@ public class VisibilityUtils {
     if (ProtobufUtil.isPBMagicPrefix(data)) {
       int pblen = ProtobufUtil.lengthOfPBMagic();
       try {
-        MultiUserAuthorizations multiUserAuths = MultiUserAuthorizations.newBuilder()
-            .mergeFrom(data, pblen, data.length - pblen).build();
-        return multiUserAuths;
-      } catch (InvalidProtocolBufferException e) {
+        MultiUserAuthorizations.Builder builder = MultiUserAuthorizations.newBuilder();
+        ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+        return builder.build();
+      } catch (IOException e) {
         throw new DeserializationException(e);
       }
     }
@@ -82,7 +82,6 @@ import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.primitives.Ints;
-import com.google.protobuf.InvalidProtocolBufferException;
 
 /**
  * Utility methods for interacting with the underlying file system.
@@ -605,11 +604,10 @@ public abstract class FSUtils {
     int pblen = ProtobufUtil.lengthOfPBMagic();
     FSProtos.HBaseVersionFileContent.Builder builder =
       FSProtos.HBaseVersionFileContent.newBuilder();
-    FSProtos.HBaseVersionFileContent fileContent;
     try {
-      fileContent = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
-      return fileContent.getVersion();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, bytes, pblen, bytes.length - pblen);
+      return builder.getVersion();
+    } catch (IOException e) {
       // Convert
       throw new DeserializationException(e);
     }
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.zookeeper;
 
+import java.io.IOException;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -27,8 +29,6 @@ import org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.zookeeper.KeeperException;
 
-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * Tracks the load balancer state up in ZK
  */
@@ -85,8 +85,8 @@ public class LoadBalancerTracker extends ZooKeeperNodeTracker {
       LoadBalancerProtos.LoadBalancerState.newBuilder();
     try {
       int magicLen = ProtobufUtil.lengthOfPBMagic();
-      builder.mergeFrom(pbBytes, magicLen, pbBytes.length - magicLen);
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, pbBytes, magicLen, pbBytes.length - magicLen);
+    } catch (IOException e) {
       throw new DeserializationException(e);
     }
     return builder.build();
@@ -87,7 +87,7 @@ public class RegionServerTracker extends ZooKeeperListener {
       byte[] data = ZKUtil.getData(watcher, nodePath);
       if (data != null && data.length > 0 && ProtobufUtil.isPBMagicPrefix(data)) {
         int magicLen = ProtobufUtil.lengthOfPBMagic();
-        rsInfoBuilder.mergeFrom(data, magicLen, data.length - magicLen);
+        ProtobufUtil.mergeFrom(rsInfoBuilder, data, magicLen, data.length - magicLen);
       }
       if (LOG.isDebugEnabled()) {
         LOG.debug("Added tracking of RS " + nodePath);
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.zookeeper;
 
+import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -37,8 +38,6 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
 import org.apache.zookeeper.KeeperException;
 
-import com.google.protobuf.InvalidProtocolBufferException;
-
 /**
  * Implementation of TableStateManager which reads, caches and sets state
  * up in ZooKeeper. If multiple read/write clients, will make for confusion.
@@ -348,9 +347,9 @@ public class ZKTableStateManager implements TableStateManager {
       ProtobufUtil.expectPBMagicPrefix(data);
       ZooKeeperProtos.Table.Builder builder = ZooKeeperProtos.Table.newBuilder();
       int magicLen = ProtobufUtil.lengthOfPBMagic();
-      ZooKeeperProtos.Table t = builder.mergeFrom(data, magicLen, data.length - magicLen).build();
-      return t.getState();
-    } catch (InvalidProtocolBufferException e) {
+      ProtobufUtil.mergeFrom(builder, data, magicLen, data.length - magicLen);
+      return builder.getState();
+    } catch (IOException e) {
       KeeperException ke = new KeeperException.DataInconsistencyException();
       ke.initCause(e);
       throw ke;
@@ -0,0 +1,115 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.mapreduce;
+
+
+import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.experimental.categories.Category;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Result;
+
+import org.apache.hadoop.hbase.mapreduce.MutationSerialization;
+import org.apache.hadoop.hbase.mapreduce.ResultSerialization;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.serializer.Deserializer;
+import org.apache.hadoop.io.serializer.Serializer;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+@Category(SmallTests.class)
+public class TestSerialization {
+  @Rule public TestName name = new TestName();
+  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private static Configuration conf;
+  private static final byte [] row = Bytes.toBytes("row1");
+  private static final byte [] qualifier = Bytes.toBytes("qualifier1");
+  private static final byte [] family = Bytes.toBytes("family1");
+  private static final byte [] value = new byte[100 * 1024 * 1024];
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    conf = TEST_UTIL.getConfiguration();
+    conf.setInt("hbase.client.keyvalue.maxsize", Integer.MAX_VALUE);
+  }
+
+  @Test
+  public void testLargeMutation()
+  throws Exception {
+    Put put = new Put(row);
+    put.addColumn(family, qualifier, value);
+
+    MutationSerialization serialization = new MutationSerialization();
+    Serializer<Mutation> serializer = serialization.getSerializer(Mutation.class);
+    Deserializer<Mutation> deserializer = serialization.getDeserializer(Mutation.class);
+    ByteArrayOutputStream os = new ByteArrayOutputStream();
+    ByteArrayInputStream is = null;
+    try {
+      serializer.open(os);
+      serializer.serialize(put);
+      os.flush();
+      is = new ByteArrayInputStream(os.toByteArray());
+      deserializer.open(is);
+      deserializer.deserialize(null);
+    } catch (InvalidProtocolBufferException e) {
+      assertTrue("Got InvalidProtocolBufferException in " + name.getMethodName(),
+        e.getCause() instanceof InvalidProtocolBufferException);
+    } catch (Exception e) {
+      fail("Got an invalid exception: " + e);
+    }
+  }
+
+  @Test
+  public void testLargeResult()
+  throws Exception {
+    Result res = Result.create(new KeyValue[] {new KeyValue(row, family, qualifier, 0L, value)});
+
+    ResultSerialization serialization = new ResultSerialization();
+    Serializer<Result> serializer = serialization.getSerializer(Result.class);
+    Deserializer<Result> deserializer = serialization.getDeserializer(Result.class);
+    ByteArrayOutputStream os = new ByteArrayOutputStream();
+    ByteArrayInputStream is = null;
+    try {
+      serializer.open(os);
+      serializer.serialize(res);
+      os.flush();
+      is = new ByteArrayInputStream(os.toByteArray());
+      deserializer.open(is);
+      deserializer.deserialize(null);
+    } catch (InvalidProtocolBufferException e) {
+      assertTrue("Got InvalidProtocolBufferException in " + name.getMethodName(),
+        e.getCause() instanceof InvalidProtocolBufferException);
+    } catch (Exception e) {
+      fail("Got an invalid exception: " + e);
+    }
+  }
+}
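The new test exercises the change end to end: `value` is 100 * 1024 * 1024 bytes, deliberately larger than the 64 MB default limit of protobuf's `CodedInputStream`, and `hbase.client.keyvalue.maxsize` is raised so the client does not reject the oversized cell before serialization. Round-tripping such a Mutation or Result through the Hadoop serializers would previously have failed inside `parseDelimitedFrom`. For reference, this is roughly the knob the `ProtobufUtil` helpers are assumed to turn on the protobuf 2.x API (a generic sketch, not HBase code):

```java
import java.io.IOException;
import java.io.InputStream;

import com.google.protobuf.CodedInputStream;
import com.google.protobuf.Message;

final class SizeLimitSketch {
  /** Parses a whole message from the stream without the default 64 MB ceiling. */
  static Message parseUnbounded(Message.Builder builder, InputStream in) throws IOException {
    CodedInputStream cis = CodedInputStream.newInstance(in);
    cis.setSizeLimit(Integer.MAX_VALUE); // the default limit is 64 MB
    builder.mergeFrom(cis);
    cis.checkLastTagWas(0);
    return builder.build();
  }
}
```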