HBASE-23975 make hbase-rest use shaded protobuf (#1325)

Signed-off-by: stack <stack@apache.org>
Signed-off-by: Jan Hentschel <jan.hentschel@ultratendency.com>
Signed-off-by: Duo Zhang <zhangduo@apache.org>
This commit is contained in:
Semen Komissarov 2020-03-26 16:35:36 +03:00 committed by GitHub
parent 9042bc0058
commit d7ff742ce8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
23 changed files with 65 additions and 65 deletions

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
syntax = "proto2";
package org.apache.hadoop.hbase.rest.protobuf.generated;
package org.apache.hadoop.hbase.shaded.rest.protobuf.generated;
message Cell {
optional bytes row = 1; // unused if Cell is in a CellSet

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
syntax = "proto2";
package org.apache.hadoop.hbase.rest.protobuf.generated;
package org.apache.hadoop.hbase.shaded.rest.protobuf.generated;
import "CellMessage.proto";

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
syntax = "proto2";
package org.apache.hadoop.hbase.rest.protobuf.generated;
package org.apache.hadoop.hbase.shaded.rest.protobuf.generated;
message ColumnSchema {
optional string name = 1;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
syntax = "proto2";
package org.apache.hadoop.hbase.rest.protobuf.generated;
package org.apache.hadoop.hbase.shaded.rest.protobuf.generated;
message NamespaceProperties {
message Property {

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
syntax = "proto2";
package org.apache.hadoop.hbase.rest.protobuf.generated;
package org.apache.hadoop.hbase.shaded.rest.protobuf.generated;
message Namespaces {
repeated string namespace = 1;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
syntax = "proto2";
package org.apache.hadoop.hbase.rest.protobuf.generated;
package org.apache.hadoop.hbase.shaded.rest.protobuf.generated;
message Scanner {
optional bytes startRow = 1;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
syntax = "proto2";
package org.apache.hadoop.hbase.rest.protobuf.generated;
package org.apache.hadoop.hbase.shaded.rest.protobuf.generated;
message StorageClusterStatus {
message Region {

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
syntax = "proto2";
package org.apache.hadoop.hbase.rest.protobuf.generated;
package org.apache.hadoop.hbase.shaded.rest.protobuf.generated;
message TableInfo {
required string name = 1;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
syntax = "proto2";
package org.apache.hadoop.hbase.rest.protobuf.generated;
package org.apache.hadoop.hbase.shaded.rest.protobuf.generated;
message TableList {
repeated string name = 1;

View File

@@ -18,7 +18,7 @@
syntax = "proto2";
import "ColumnSchemaMessage.proto";
package org.apache.hadoop.hbase.rest.protobuf.generated;
package org.apache.hadoop.hbase.shaded.rest.protobuf.generated;
message TableSchema {
optional string name = 1;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
syntax = "proto2";
package org.apache.hadoop.hbase.rest.protobuf.generated;
package org.apache.hadoop.hbase.shaded.rest.protobuf.generated;
message Version {
optional string restVersion = 1;

View File

@@ -123,19 +123,6 @@
</systemPropertyVariables>
</configuration>
</plugin>
<plugin>
<groupId>org.xolstice.maven.plugins</groupId>
<artifactId>protobuf-maven-plugin</artifactId>
<executions>
<execution>
<id>compile-protoc</id>
<phase>generate-sources</phase>
<goals>
<goal>compile</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>net.revelc.code</groupId>
<artifactId>warbucks-maven-plugin</artifactId>
@@ -166,7 +153,7 @@
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-protocol</artifactId>
<artifactId>hbase-protocol-shaded</artifactId>
<type>jar</type>
</dependency>
<dependency>
@@ -235,8 +222,8 @@
<artifactId>hbase-shaded-miscellaneous</artifactId>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<groupId>org.apache.hbase.thirdparty</groupId>
<artifactId>hbase-shaded-protobuf</artifactId>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>

View File

@@ -32,14 +32,15 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.CellMessage.Cell;
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
* Representation of a cell. A cell is a single value associated a column and
* optional qualifier, and either the timestamp when it was stored or the user-
@@ -189,8 +190,8 @@ public class CellModel implements ProtobufMessageHandler, Serializable {
@Override
public byte[] createProtobufOutput() {
Cell.Builder builder = Cell.newBuilder();
builder.setColumn(ByteStringer.wrap(getColumn()));
builder.setData(ByteStringer.wrap(getValue()));
builder.setColumn(UnsafeByteOperations.unsafeWrap(getColumn()));
builder.setData(UnsafeByteOperations.unsafeWrap(getValue()));
if (hasUserTimestamp()) {
builder.setTimestamp(getTimestamp());
}

View File

@@ -29,11 +29,13 @@ import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.CellMessage.Cell;
import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.CellSetMessage.CellSet;
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
import org.apache.yetus.audience.InterfaceAudience;
@@ -114,11 +116,11 @@ public class CellSetModel implements Serializable, ProtobufMessageHandler {
CellSet.Builder builder = CellSet.newBuilder();
for (RowModel row : getRows()) {
CellSet.Row.Builder rowBuilder = CellSet.Row.newBuilder();
rowBuilder.setKey(ByteStringer.wrap(row.getKey()));
rowBuilder.setKey(UnsafeByteOperations.unsafeWrap(row.getKey()));
for (CellModel cell : row.getCells()) {
Cell.Builder cellBuilder = Cell.newBuilder();
cellBuilder.setColumn(ByteStringer.wrap(cell.getColumn()));
cellBuilder.setData(ByteStringer.wrap(cell.getValue()));
cellBuilder.setColumn(UnsafeByteOperations.unsafeWrap(cell.getColumn()));
cellBuilder.setData(UnsafeByteOperations.unsafeWrap(cell.getValue()));
if (cell.hasUserTimestamp()) {
cellBuilder.setTimestamp(cell.getTimestamp());
}

View File

@@ -34,7 +34,8 @@ import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf
import org.apache.hadoop.hbase.shaded.rest.protobuf
.generated.NamespacePropertiesMessage.NamespaceProperties;
/**

View File

@@ -33,7 +33,8 @@ import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces;
import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.NamespacesMessage.Namespaces;
import com.fasterxml.jackson.annotation.JsonProperty;

View File

@@ -68,16 +68,18 @@ import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.TimestampsFilter;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.filter.WhileMatchFilter;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.ScannerMessage.Scanner;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import com.google.protobuf.ByteString;
/**
* A representation of Scanner parameters.
@@ -798,13 +800,13 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
public byte[] createProtobufOutput() {
Scanner.Builder builder = Scanner.newBuilder();
if (!Bytes.equals(startRow, HConstants.EMPTY_START_ROW)) {
builder.setStartRow(ByteStringer.wrap(startRow));
builder.setStartRow(UnsafeByteOperations.unsafeWrap(startRow));
}
if (!Bytes.equals(endRow, HConstants.EMPTY_START_ROW)) {
builder.setEndRow(ByteStringer.wrap(endRow));
builder.setEndRow(UnsafeByteOperations.unsafeWrap(endRow));
}
for (byte[] column: columns) {
builder.addColumns(ByteStringer.wrap(column));
builder.addColumns(UnsafeByteOperations.unsafeWrap(column));
}
if (startTime != 0) {
builder.setStartTime(startTime);

View File

@@ -30,12 +30,14 @@ import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
import org.apache.yetus.audience.InterfaceAudience;
/**
@@ -743,7 +745,7 @@ public class StorageClusterStatusModel implements Serializable, ProtobufMessageH
for (Node.Region region : node.regions) {
StorageClusterStatus.Region.Builder regionBuilder =
StorageClusterStatus.Region.newBuilder();
regionBuilder.setName(ByteStringer.wrap(region.name));
regionBuilder.setName(UnsafeByteOperations.unsafeWrap(region.name));
regionBuilder.setStores(region.stores);
regionBuilder.setStorefiles(region.storefiles);
regionBuilder.setStorefileSizeMB(region.storefileSizeMB);

View File

@@ -28,11 +28,13 @@ import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.TableInfoMessage.TableInfo;
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
* Representation of a list of table regions.
@@ -135,8 +137,8 @@ public class TableInfoModel implements Serializable, ProtobufMessageHandler {
TableInfo.Region.Builder regionBuilder = TableInfo.Region.newBuilder();
regionBuilder.setName(aRegion.getName());
regionBuilder.setId(aRegion.getId());
regionBuilder.setStartKey(ByteStringer.wrap(aRegion.getStartKey()));
regionBuilder.setEndKey(ByteStringer.wrap(aRegion.getEndKey()));
regionBuilder.setStartKey(UnsafeByteOperations.unsafeWrap(aRegion.getStartKey()));
regionBuilder.setEndKey(UnsafeByteOperations.unsafeWrap(aRegion.getEndKey()));
regionBuilder.setLocation(aRegion.getLocation());
builder.addRegions(regionBuilder);
}

View File

@@ -29,8 +29,9 @@ import javax.xml.bind.annotation.XmlRootElement;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.TableListMessage.TableList;
/**
* Simple representation of a list of table names.

View File

@@ -45,12 +45,13 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.TableSchemaMessage.TableSchema;
import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
/**
* A representation of HBase table descriptors.
*

View File

@@ -29,10 +29,10 @@ import javax.xml.bind.annotation.XmlRootElement;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.RESTServlet;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage.Version;
import org.glassfish.jersey.servlet.ServletContainer;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.VersionMessage.Version;
/**
* A representation of the collection of versions of the REST gateway software

View File

@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.Response;