HBASE-17056 Remove checked in PB generated files
Selective add of dependency on hbase-thirdparty jars. Updates to READMEs on how protobuf is done (and to the refguide). Removed all checked-in generated protobuf files; they are now generated on the fly as part of the mainline build.
parent f923342998, commit ee70b1d2e0
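
In practice the change works like this: the protobuf-maven-plugin's compile-protoc execution is now bound to the generate-sources phase in the poms below, so every build regenerates the protobuf java (under target/generated-sources) before compiling. A minimal sketch of the new developer workflow (the exact goals and flags here are illustrative, not prescribed by this commit):

 $ mvn clean install -DskipTests   # protoc runs automatically in generate-sources

There is no separate -Pcompile-protobuf step to run first and no generated java to check in afterwards.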
@@ -87,6 +87,10 @@
   </build>

   <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase.thirdparty</groupId>
+      <artifactId>hbase-shaded-protobuf</artifactId>
+    </dependency>
     <!-- Intra-project dependencies -->
     <dependency>
       <groupId>org.apache.hbase</groupId>
@@ -1,24 +1,13 @@
 ON PROTOBUFS
 This maven module has protobuf definition files ('.protos') used by hbase
-Coprocessor Endpoints that ship with hbase core including tests. Coprocessor
+Coprocessor Endpoints that ship with hbase core (including tests). Coprocessor
 Endpoints are meant to be standalone, independent code not reliant on hbase
 internals. They define their Service using protobuf. The protobuf version
 they use can be distinct from that used by HBase internally since HBase started
 shading its protobuf references. Endpoints have no access to the shaded protobuf
-hbase uses. They do have access to the content of hbase-protocol but avoid using
-as much of this as you can as it is liable to change.
+hbase uses. They do have access to the content of hbase-protocol -- the
+.protos found in this module -- but avoid using as much of this as you can as it is
+liable to change.
 
-Generation of java files from protobuf .proto files included here is done apart
-from the build. Run the generation whenever you make changes to the .proto files
-and then check in the produced java. (The reasoning is that change is infrequent
-so why pay the price of generating files anew on each build.)
-
-To generate java files from protos run:
-
- $ mvn compile -Dcompile-protobuf
-or
- $ mvn compile -Pcompile-protobuf
-
-After you've done the above, check it and then check in changes (or post a patch
-on a JIRA with your definition file changes and the generated files). Be careful
-to notice new files and files removed and do appropriate git rm/adds.
+Generation of java files from protobuf .proto files included here is done as
+part of the build.
@@ -1,6 +1,6 @@
 <?xml version="1.0"?>
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 <!--
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -19,37 +19,34 @@
  * limitations under the License.
  */
 -->
 <modelVersion>4.0.0</modelVersion>
 <parent>
   <artifactId>hbase</artifactId>
   <groupId>org.apache.hbase</groupId>
   <version>2.0.0-alpha-2-SNAPSHOT</version>
   <relativePath>..</relativePath>
 </parent>
-
 <artifactId>hbase-endpoint</artifactId>
 <name>Apache HBase - Coprocessor Endpoint</name>
 <description>HBase Coprocessor Endpoint implementations</description>
 <!--REMOVE-->
-
 <properties>
   <maven.javadoc.skip>true</maven.javadoc.skip>
 </properties>
-
 <build>
   <plugins>
     <plugin>
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-site-plugin</artifactId>
       <configuration>
         <skip>true</skip>
       </configuration>
     </plugin>
     <!-- Make a jar and put the sources in the jar -->
     <plugin>
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-source-plugin</artifactId>
     </plugin>
     <plugin>
       <!--Make it so assembly:single does nothing in here-->
       <artifactId>maven-assembly-plugin</artifactId>
@@ -57,38 +54,56 @@
         <skipAssembly>true</skipAssembly>
       </configuration>
     </plugin>
+    <plugin>
+      <groupId>org.xolstice.maven.plugins</groupId>
+      <artifactId>protobuf-maven-plugin</artifactId>
+      <executions>
+        <execution>
+          <id>compile-protoc</id>
+          <phase>generate-sources</phase>
+          <goals>
+            <goal>compile</goal>
+          </goals>
+          <configuration>
+            <additionalProtoPathElements>
+              <additionalProtoPathElement>${basedir}/../hbase-protocol/src/main/protobuf</additionalProtoPathElement>
+            </additionalProtoPathElements>
+          </configuration>
+        </execution>
+      </executions>
+    </plugin>
   </plugins>
   <pluginManagement>
     <plugins>
       <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
       <plugin>
         <groupId>org.eclipse.m2e</groupId>
         <artifactId>lifecycle-mapping</artifactId>
-        <version>1.0.0</version>
         <configuration>
           <lifecycleMappingMetadata>
             <pluginExecutions>
               <pluginExecution>
                 <pluginExecutionFilter>
                   <groupId>org.apache.hadoop</groupId>
                   <artifactId>hadoop-maven-plugins</artifactId>
                   <versionRange>[2.0.5-alpha,)</versionRange>
                   <goals>
                     <goal>protoc</goal>
                   </goals>
                 </pluginExecutionFilter>
                 <action>
-                  <ignore/>
+                  <ignore></ignore>
                 </action>
               </pluginExecution>
             </pluginExecutions>
           </lifecycleMappingMetadata>
         </configuration>
       </plugin>
     </plugins>
   </pluginManagement>
 </build>
-
 <dependencies>
   <dependency>
     <groupId>org.apache.hbase.thirdparty</groupId>
     <artifactId>hbase-shaded-miscellaneous</artifactId>
@@ -131,88 +146,57 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-protocol</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-client</artifactId>
     </dependency>
     <!--Some of the CPEPs use hbase server-side internals; they shouldn't!
     -->
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
     <!-- General dependencies -->
     <dependency>
       <groupId>commons-logging</groupId>
       <artifactId>commons-logging</artifactId>
     </dependency>
   </dependencies>
-
   <profiles>
     <!-- Skip the tests in this module -->
     <profile>
       <id>skipRpcTests</id>
       <activation>
         <property>
           <name>skipRpcTests</name>
         </property>
       </activation>
       <properties>
         <surefire.skipFirstPart>true</surefire.skipFirstPart>
         <surefire.skipSecondPart>true</surefire.skipSecondPart>
       </properties>
     </profile>
-    <profile>
-      <id>compile-protobuf</id>
-      <activation>
-        <property>
-          <name>compile-protobuf</name>
-        </property>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.xolstice.maven.plugins</groupId>
-            <artifactId>protobuf-maven-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>compile-protoc</id>
-                <phase>generate-sources</phase>
-                <goals>
-                  <goal>compile</goal>
-                </goals>
-                <configuration>
-                  <additionalProtoPathElements>
-                    <additionalProtoPathElement>${basedir}/../hbase-protocol/src/main/protobuf</additionalProtoPathElement>
-                  </additionalProtoPathElements>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
     <!-- Profiles for building against different hadoop versions -->
     <!-- There are a lot of common dependencies used here, should investigate
          if we can combine these profiles somehow -->
-
     <!-- profile for building against Hadoop 2.x. This is the default. -->
     <profile>
       <id>hadoop-2.0</id>
       <activation>
         <property>
           <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
-          <!--h2--><name>!hadoop.profile</name>
+          <!--h2-->
+          <name>!hadoop.profile</name>
         </property>
       </activation>
       <dependencies>

@@ -315,5 +299,5 @@
       </dependency>
     </dependencies>
   </profile>
 </profiles>
</project>
[8 file diffs suppressed because they are too large]
@@ -63,27 +63,8 @@ Example code.
 3. Execute {./DemoClient}.
 
 ON PROTOBUFS
-This maven module has protobuf definition files ('.protos') used by hbase
-Coprocessor Endpoints examples including tests. Coprocessor
-Endpoints are meant to be standalone, independent code not reliant on hbase
-internals. They define their Service using protobuf. The protobuf version
-they use can be distinct from that used by HBase internally since HBase started
-shading its protobuf references. Endpoints have no access to the shaded protobuf
-hbase uses. They do have access to the content of hbase-protocol -- the
-.protos found in here -- but avoid using as much of this as you can as it is
-liable to change.
+This maven module has core protobuf definition files ('.protos') used by hbase
+examples.
 
-Generation of java files from protobuf .proto files included here is done apart
-from the build. Run the generation whenever you make changes to the .proto files
-and then check in the produced java. (The reasoning is that change is infrequent
-so why pay the price of generating files anew on each build.)
-
-To generate java files from protos run:
-
- $ mvn compile -Dcompile-protobuf
-or
- $ mvn compile -Pcompile-protobuf
-
-After you've done the above, check it and then check in changes (or post a patch
-on a JIRA with your definition file changes and the generated files). Be careful
-to notice new files and files removed and do appropriate git rm/adds.
+Generation of java files from protobuf .proto files included here is done as
+part of the build.
@@ -29,7 +29,7 @@
   <artifactId>hbase-examples</artifactId>
   <name>Apache HBase - Examples</name>
   <description>Examples of HBase usage</description>
   <!--REMOVE-->
   <build>
     <plugins>
       <plugin>
@@ -46,20 +46,33 @@
           <skipAssembly>true</skipAssembly>
         </configuration>
       </plugin>
       <plugin>
         <artifactId>maven-surefire-plugin</artifactId>
         <version>${surefire.version}</version>
         <configuration>
           <!-- Have to set the groups here because we only do
                split tests in this package, so groups only live in this module -->
           <groups>${surefire.firstPartGroups}</groups>
         </configuration>
       </plugin>
       <!-- Make a jar and put the sources in the jar -->
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-source-plugin</artifactId>
       </plugin>
+      <plugin>
+        <groupId>org.xolstice.maven.plugins</groupId>
+        <artifactId>protobuf-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>compile-protoc</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
     <pluginManagement>
       <plugins>
@@ -81,7 +94,7 @@
                 </goals>
               </pluginExecutionFilter>
               <action>
-                <ignore></ignore>
+                <ignore/>
               </action>
             </pluginExecution>
             <pluginExecution>
@@ -94,7 +107,7 @@
                 </goals>
               </pluginExecutionFilter>
               <action>
-                <ignore></ignore>
+                <ignore/>
               </action>
             </pluginExecution>
           </pluginExecutions>
@@ -116,16 +129,16 @@
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-common</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-protocol</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-client</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
@@ -140,11 +153,10 @@
       <artifactId>hbase-thrift</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-testing-util</artifactId>
       <scope>test</scope>
     </dependency>
-
     <dependency>
       <groupId>org.apache.thrift</groupId>
       <artifactId>libthrift</artifactId>
@@ -161,158 +173,126 @@
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
     </dependency>
   </dependencies>
   <profiles>
     <!-- Skip the tests in this module -->
     <profile>
       <id>skipExamplesTests</id>
       <activation>
         <property>
           <name>skipExamplesTests</name>
         </property>
       </activation>
       <properties>
         <surefire.skipFirstPart>true</surefire.skipFirstPart>
         <surefire.skipSecondPart>true</surefire.skipSecondPart>
       </properties>
     </profile>
-    <profile>
-      <id>compile-protobuf</id>
-      <activation>
-        <property>
-          <name>compile-protobuf</name>
-        </property>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.xolstice.maven.plugins</groupId>
-            <artifactId>protobuf-maven-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>compile-protoc</id>
-                <phase>generate-sources</phase>
-                <goals>
-                  <goal>compile</goal>
-                </goals>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
     <!-- Profiles for building against different hadoop versions -->
     <!-- There are a lot of common dependencies used here, should investigate
          if we can combine these profiles somehow -->
-
     <!-- profile for building against Hadoop 2.x. This is the default -->
     <profile>
       <id>hadoop-2.0</id>
       <activation>
         <property>
           <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
-          <!--h2--><name>!hadoop.profile</name>
+          <!--h2-->
+          <name>!hadoop.profile</name>
         </property>
       </activation>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <exclusions>
-            <exclusion>
-              <groupId>com.google.guava</groupId>
-              <artifactId>guava</artifactId>
-            </exclusion>
-          </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
         </dependency>
       </dependencies>
       <build>
         <plugins>
           <plugin>
             <artifactId>maven-dependency-plugin</artifactId>
             <executions>
               <execution>
                 <id>create-mrapp-generated-classpath</id>
                 <phase>generate-test-resources</phase>
                 <goals>
                   <goal>build-classpath</goal>
                 </goals>
                 <configuration>
                   <!-- needed to run the unit test for DS to generate
                        the required classpath that is required in the env
                        of the launch container in the mini mr/yarn cluster
                   -->
                   <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
                 </configuration>
               </execution>
             </executions>
           </plugin>
         </plugins>
       </build>
     </profile>
     <!--
       profile for building against Hadoop 3.0.x. Activate using:
       mvn -Dhadoop.profile=3.0
     -->
     <profile>
       <id>hadoop-3.0</id>
       <activation>
         <property>
           <name>hadoop.profile</name>
           <value>3.0</value>
         </property>
       </activation>
       <properties>
         <hadoop.version>3.0-SNAPSHOT</hadoop.version>
       </properties>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-annotations</artifactId>
           <exclusions>
             <exclusion>
               <groupId>jdk.tools</groupId>
               <artifactId>jdk.tools</artifactId>
             </exclusion>
           </exclusions>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-minicluster</artifactId>
         </dependency>
       </dependencies>
       <build>
         <plugins>
           <plugin>
             <artifactId>maven-dependency-plugin</artifactId>
             <executions>
               <execution>
                 <id>create-mrapp-generated-classpath</id>
                 <phase>generate-test-resources</phase>
                 <goals>
                   <goal>build-classpath</goal>
                 </goals>
                 <configuration>
                   <!-- needed to run the unit test for DS to generate
                        the required classpath that is required in the env
                        of the launch container in the mini mr/yarn cluster
                   -->
                   <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
                 </configuration>
               </execution>
             </executions>
           </plugin>
         </plugins>
       </build>
     </profile>
   </profiles>
 </project>
[File diff suppressed because it is too large]
@@ -69,6 +69,10 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hbase.thirdparty</groupId>
+      <artifactId>hbase-shaded-protobuf</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-protocol-shaded</artifactId>
@@ -1,53 +1,6 @@
-Please read carefully as the 'menu options' have changed.
-What you do in here is not what you do elsewhere to generate
-proto java files.
-
 This module has proto files used by core. These protos
 overlap with protos that are used by coprocessor endpoints
-(CPEP) in the module hbase-protocol. So the core versions have
+(CPEP) in the module hbase-protocol. So core versions have
 a different name, the generated classes are relocated
 -- i.e. shaded -- to a new location; they are moved from
 org.apache.hadoop.hbase.* to org.apache.hadoop.hbase.shaded.
-
-This module also includes the protobuf that hbase core depends
-on again relocated to live at an offset of
-org.apache.hadoop.hbase.shaded so as to avoid clashes with other
-versions of protobuf resident on our CLASSPATH included,
-transitively or otherwise, by dependencies: i.e. the shaded
-protobuf Message class is at
-org.apache.hadoop.hbase.shaded.com.google.protobuf.Message
-rather than at com.google.protobuf.Message.
-
-Finally, this module also includes patches applied on top of
-protobuf to add functionality not yet in protobuf that we
-need now.
-
-If you make changes to protos, to the protobuf version or to
-the patches you want to apply to protobuf, you must rerun the
-below step and then check in what it generated:
-
- $ mvn install -Dcompile-protobuf
-
-or
-
- $ mvn install -Pcompile-protobuf
-
-NOTE: 'install' above whereas other proto generation only needs 'compile'.
-NOTE: Unlike elsewhere, the above command does NOT install this module's jar
-into the repo., intentionally. The jar made by the above is a scratch jar
-that is part of the process that gets us to a set of files to check in;
-it is not for consumption. Run mvn install without the '-Pcompile-protobuf'
-option to get this module's artifact installed in your repo!
-
-When finished, the content of src/main/java/org/apache/hadoop/hbase/shaded
-will have been updated. Make sure all builds and then carefully
-check in the changes. Files may have been added or removed
-by the steps above.
-
-The protobuf version used internally by hbase differs from what
-is used over in the CPEP hbase-protocol module but mvn takes care
-of ensuring we have the right protobuf in place so you don't have to.
-
-If you have patches for the protobuf, add them to
-src/main/patches directory. They will be applied after
-protobuf is shaded and unbundled into src/main/java.
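
To make the relocation described above concrete, here is a minimal, hypothetical sketch (the class and method names are invented for illustration; only the import path is taken from this README) of hbase-internal code compiled against this module:

  // Hypothetical example: hbase internals import the relocated protobuf,
  // not stock com.google.protobuf, so both can coexist on the CLASSPATH.
  import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;

  public class ShadedProtobufExample {
    // Accepts any message built with the hbase-internal (shaded) protobuf.
    static int serializedSize(Message m) {
      return m.getSerializedSize();
    }
  }

A Coprocessor Endpoint, by contrast, keeps importing com.google.protobuf.Message and never sees these relocated classes.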
@@ -32,18 +32,12 @@
   <properties>
     <maven.javadoc.skip>true</maven.javadoc.skip>
-    <internal.protobuf.version>3.2.0</internal.protobuf.version>
-    <!--The Default target dir-->
-    <classes.dir>${project.build.directory}/classes</classes.dir>
-    <!--The Default location for sources-->
-    <sources.dir>src/main/java</sources.dir>
+    <!--Version of protobuf that hbase uses internally (we shade our pb)
+        Must match what is out in hbase-thirdparty include.
+        (Note, there may only be a 3.3.0 protoc... no 3.3.1 protoc)
+    -->
+    <internal.protobuf.version>3.3.0</internal.protobuf.version>
   </properties>
   <build>
-    <!--I want to override these in profile so define them
-        with variables up here-->
-    <sourceDirectory>${sources.dir}</sourceDirectory>
-    <outputDirectory>${classes.dir}</outputDirectory>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
@@ -56,7 +50,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-source-plugin</artifactId>
       </plugin>
       <plugin>
         <!--Make it so assembly:single does nothing in here-->
         <artifactId>maven-assembly-plugin</artifactId>
@@ -80,6 +74,101 @@
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.xolstice.maven.plugins</groupId>
+        <artifactId>protobuf-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>compile-protoc</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+            <configuration>
+              <protocArtifact>com.google.protobuf:protoc:${internal.protobuf.version}:exe:${os.detected.classifier}</protocArtifact>
+              <attachProtoSources>false</attachProtoSources>
+              <checkStaleness>true</checkStaleness>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <!--Need this old plugin to replace in generated files instances
+          of com.google.protobuf so instead it's o.a.h.h.shaded.com.google.protobuf.
+          Plugin is old and in google code archive. Here is usage done by
+          another: https://github.com/beiliubei/maven-replacer-plugin/wiki/Usage-Guide
+          The mess with the regex in the below is to prevent replacement every time
+          we run mvn install. There is probably a better way of avoiding the
+          double interpolation but this is it for now.
+      -->
+      <plugin>
+        <groupId>com.google.code.maven-replacer-plugin</groupId>
+        <artifactId>replacer</artifactId>
+        <version>1.5.3</version>
+        <executions>
+          <execution>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>replace</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <basedir>${basedir}/target/generated-sources/</basedir>
+          <includes>
+            <include>**/*.java</include>
+          </includes>
+          <replacements>
+            <replacement>
+              <token>([^\.])com.google.protobuf</token>
+              <value>$1org.apache.hadoop.hbase.shaded.com.google.protobuf</value>
+            </replacement>
+          </replacements>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <minimizeJar>true</minimizeJar>
+              <shadeSourcesContent>true</shadeSourcesContent>
+              <!-- Causes an NPE until shade 3.0.1. See MSHADE-247
+                <createSourcesJar>true</createSourcesJar>
+              -->
+              <relocations>
+                <relocation>
+                  <pattern>com.google.protobuf</pattern>
+                  <shadedPattern>org.apache.hadoop.hbase.shaded.com.google.protobuf</shadedPattern>
+                </relocation>
+              </relocations>
+              <artifactSet>
+                <excludes>
+                  <!--Exclude protobuf itself. We get a patched version from hbase-thirdparty.
+                  -->
+                  <exclude>org.apache.hadoop.hbase.shaded.com.google:*</exclude>
+                  <exclude>com.google.protobuf:protobuf-java</exclude>
+                  <exclude>com.google.code.findbugs:*</exclude>
+                  <exclude>com.google.errorprone:error_prone_annotations</exclude>
+                  <exclude>com.google.j2objc:j2objc-annotations</exclude>
+                  <exclude>org.codehaus.mojo:animal-sniffer-annotations</exclude>
+                  <exclude>junit:junit</exclude>
+                  <exclude>log4j:log4j</exclude>
+                  <exclude>commons-logging:commons-logging</exclude>
+                  <exclude>org.apache.hbase:hbase-annotations</exclude>
+                  <exclude>com.github.stephenc.findbugs:*</exclude>
+                </excludes>
+              </artifactSet>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
     <pluginManagement>
       <plugins>
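
A standalone demonstration (not part of the build; the class name is hypothetical) of why the replacer token above carries the leading ([^\.]) group: it makes the relocation idempotent, so rerunning generate-sources does not double-prefix already-shaded references.

  import java.util.regex.Pattern;

  public class RelocateDemo {
    public static void main(String[] args) {
      Pattern p = Pattern.compile("([^\\.])com.google.protobuf");
      String shaded = p.matcher("import com.google.protobuf.Message;")
          .replaceAll("$1org.apache.hadoop.hbase.shaded.com.google.protobuf");
      // shaded -> "import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;"
      String again = p.matcher(shaded)
          .replaceAll("$1org.apache.hadoop.hbase.shaded.com.google.protobuf");
      // Second pass is a no-op: the only remaining match site is preceded
      // by '.', which ([^\.]) refuses to match.
      System.out.println(shaded.equals(again));  // prints true
    }
  }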
@@ -115,6 +204,10 @@
        excluded above in the shade plugin else the dependency
        will get bundled-->
   <!-- Intra-project dependencies -->
+  <dependency>
+    <groupId>org.apache.hbase.thirdparty</groupId>
+    <artifactId>hbase-shaded-protobuf</artifactId>
+  </dependency>
   <dependency>
     <groupId>org.apache.hbase</groupId>
     <artifactId>hbase-annotations</artifactId>
@@ -147,218 +240,8 @@
       </activation>
       <properties>
         <surefire.skipFirstPart>true</surefire.skipFirstPart>
         <surefire.skipSecondPart>true</surefire.skipSecondPart>
       </properties>
     </profile>
-    <profile>
-      <id>compile-protobuf</id>
-      <!--
-        Generate and shade proto files. Drops generated java files
-        under src/main/java when done. Check in the generated files so
-        available at build time. Run this profile/step every time you change
-        proto files or update the protobuf version.
-
-        The below does a bunch of ugly stuff. It purges current content
-        of the generated and shaded com.google.protobuf java files first.
-        Let me say that again. We do a remove of java files under src/main/java
-        in the shaded dirs. It does this because later we apply patches and
-        patches fail if they've already been applied. We remove too because we
-        overlay the shaded protobuf and if files have been removed or added,
-        it'll be more plain if we have first done this delete.
-
-        Next up we generate protos, build a scratch jar that contains protos
-        only and stuff we want shaded (we have to do this because shading only
-        works at install time on a jar), run the shade on the jar, then
-        carefully STOP this scratch jar from being put into the local repository
-        (because it can mess up builds that come later... mvn automatically wants
-        to install artifact into repo per module). Finally, undo this shaded
-        jar over the src/main/java directory, and then apply patches atop this.
-
-        The result needs to be checked in.
-      -->
-      <activation>
-        <property>
-          <name>compile-protobuf</name>
-        </property>
-      </activation>
-      <properties>
-        <profile.id>compile-protobuf</profile.id>
-        <!--Directory under target to hold generated protos files-->
-        <protoc.sources.dir>${project.build.directory}/protoc-generated-sources</protoc.sources.dir>
-        <!--When doing this step, the sources.dir is pointed at generated protos, NOT src/main/java-->
-        <sources.dir>${protoc.sources.dir}</sources.dir>
-        <!--Where to compile protos into-->
-        <classes.dir>${project.build.directory}/protoc-generated-classes</classes.dir>
-        <!--When the compile for this profile runs, make sure it makes jars that
-            can be related back to this shading profile. Give them the shading profile
-            name as a prefix.
-        -->
-        <jar.finalName>${profile.id}.${project.artifactId}-${project.version}</jar.finalName>
-      </properties>
-      <build>
-        <plugins>
-          <plugin>
-            <artifactId>maven-clean-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>pre-compile-protoc</id>
-                <phase>generate-sources</phase>
-                <goals>
-                  <goal>clean</goal>
-                </goals>
-                <configuration>
-                  <filesets>
-                    <fileset>
-                      <directory>${basedir}/src/main/java/org/apache/hadoop/hbase/shaded</directory>
-                      <includes>
-                        <include>ipc/protobuf/generated/**/*.java</include>
-                        <include>protobuf/generated/**/*.java</include>
-                        <include>com/google/protobuf/**/*.java</include>
-                      </includes>
-                      <followSymlinks>false</followSymlinks>
-                    </fileset>
-                  </filesets>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.xolstice.maven.plugins</groupId>
-            <artifactId>protobuf-maven-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>compile-protoc</id>
-                <phase>generate-sources</phase>
-                <goals>
-                  <goal>compile</goal>
-                </goals>
-                <configuration>
-                  <protocArtifact>com.google.protobuf:protoc:${internal.protobuf.version}:exe:${os.detected.classifier}</protocArtifact>
-                  <outputDirectory>${protoc.sources.dir}</outputDirectory>
-                  <attachProtoSources>false</attachProtoSources>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-jar-plugin</artifactId>
-            <version>${maven.jar.version}</version>
-            <configuration>
-              <finalName>${jar.finalName}</finalName>
-            </configuration>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-shade-plugin</artifactId>
-            <version>${maven.shade.version}</version>
-            <executions>
-              <execution>
-                <phase>package</phase>
-                <goals>
-                  <goal>shade</goal>
-                </goals>
-                <configuration>
-                  <shadeSourcesContent>true</shadeSourcesContent>
-                  <createSourcesJar>true</createSourcesJar>
-                  <relocations>
-                    <relocation>
-                      <pattern>com.google.protobuf</pattern>
-                      <shadedPattern>org.apache.hadoop.hbase.shaded.com.google.protobuf</shadedPattern>
-                    </relocation>
-                  </relocations>
-                  <!-- What I got when I did a mvn dependency:list for this
-                       module. Exclude all but the protobuf
-                       [INFO] commons-logging:commons-logging:jar:1.2:compile
-                       [INFO] com.github.stephenc.findbugs:findbugs-annotations:jar:1.3.9-1:compile
-                       [INFO] log4j:log4j:jar:1.2.17:compile
-                       [INFO] com.google.protobuf:protobuf-java:jar:2.5.0:compile
-                       [INFO] org.hamcrest:hamcrest-core:jar:1.3:test
-                       [INFO] org.mockito:mockito-all:jar:1.10.8:test
-                       [INFO] junit:junit:jar:4.12:compile
-                       [INFO] org.apache.hbase:hbase-annotations:jar:2.0.0-SNAPSHOT:compile
-
-                       The list below must exclude all of the above except protobuf.
-                  -->
-                  <artifactSet>
-                    <excludes>
-                      <exclude>commons-logging:commons-logging</exclude>
-                      <exclude>com.github.stephenc.findbugs:findbugs-annotations</exclude>
-                      <exclude>log4j:log4j</exclude>
-                      <exclude>org.hamcrest:hamcrest-core</exclude>
-                      <exclude>org.mockito:mockito-all</exclude>
-                      <exclude>junit:junit</exclude>
-                      <exclude>org.apache.hbase:hbase-annotations</exclude>
-                    </excludes>
-                  </artifactSet>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <!--Now unpack the shaded jar made above so the shaded classes
-              are available to subsequent modules-->
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-dependency-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>unpack</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>unpack</goal>
-                </goals>
-                <configuration>
-                  <artifactItems>
-                    <artifactItem>
-                      <groupId>${project.groupId}</groupId>
-                      <artifactId>${project.artifactId}</artifactId>
-                      <version>${project.version}</version>
-                      <classifier>sources</classifier>
-                      <type>jar</type>
-                      <overWrite>true</overWrite>
-                      <outputDirectory>${basedir}/src/main/java</outputDirectory>
-                      <includes>**/*.java</includes>
-                    </artifactItem>
-                  </artifactItems>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-patch-plugin</artifactId>
-            <version>${maven.patch.version}</version>
-            <configuration>
-              <!--Patches are made at top-level-->
-              <targetDirectory>${basedir}/..</targetDirectory>
-              <skipApplication>false</skipApplication>
-            </configuration>
-            <executions>
-              <execution>
-                <id>patch</id>
-                <configuration>
-                  <strip>1</strip>
-                  <patchDirectory>src/main/patches</patchDirectory>
-                  <patchTrackingFile>${project.build.directory}/patches-applied.txt</patchTrackingFile>
-                  <naturalOrderProcessing>true</naturalOrderProcessing>
-                </configuration>
-                <phase>package</phase>
-                <goals>
-                  <!--This should run after the above unpack phase-->
-                  <goal>apply</goal>
-                </goals>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-install-plugin</artifactId>
-            <version>${maven.install.version}</version>
-            <configuration>
-              <skip>true</skip>
-            </configuration>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
   </profiles>
 </project>
@ -1,646 +0,0 @@
|
|||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package org.apache.hadoop.hbase.shaded.com.google.protobuf;
|
||||
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor;
|
||||
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLite;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* A partial implementation of the {@link Message} interface which implements
|
||||
* as many methods of that interface as possible in terms of other methods.
|
||||
*
|
||||
* @author kenton@google.com Kenton Varda
|
||||
*/
|
||||
public abstract class AbstractMessage
|
||||
// TODO(dweis): Update GeneratedMessage to parameterize with MessageType and BuilderType.
|
||||
extends AbstractMessageLite
|
||||
implements Message {
|
||||
|
||||
@Override
|
||||
public boolean isInitialized() {
|
||||
return MessageReflection.isInitialized(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface for the parent of a Builder that allows the builder to
|
||||
* communicate invalidations back to the parent for use when using nested
|
||||
* builders.
|
||||
*/
|
||||
protected interface BuilderParent {
|
||||
|
||||
/**
|
||||
* A builder becomes dirty whenever a field is modified -- including fields
|
||||
* in nested builders -- and becomes clean when build() is called. Thus,
|
||||
* when a builder becomes dirty, all its parents become dirty as well, and
|
||||
* when it becomes clean, all its children become clean. The dirtiness
|
||||
* state is used to invalidate certain cached values.
|
||||
* <br>
|
||||
* To this end, a builder calls markDirty() on its parent whenever it
|
||||
* transitions from clean to dirty. The parent must propagate this call to
|
||||
* its own parent, unless it was already dirty, in which case the
|
||||
* grandparent must necessarily already be dirty as well. The parent can
|
||||
* only transition back to "clean" after calling build() on all children.
|
||||
*/
|
||||
void markDirty();
|
||||
}
|
||||
|
||||
/** Create a nested builder. */
|
||||
protected Message.Builder newBuilderForType(BuilderParent parent) {
|
||||
throw new UnsupportedOperationException("Nested builder is not supported for this type.");
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<String> findInitializationErrors() {
|
||||
return MessageReflection.findMissingFields(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getInitializationErrorString() {
|
||||
return MessageReflection.delimitWithCommas(findInitializationErrors());
|
||||
}
|
||||
|
||||
/** TODO(jieluo): Clear it when all subclasses have implemented this method. */
|
||||
@Override
|
||||
public boolean hasOneof(OneofDescriptor oneof) {
|
||||
throw new UnsupportedOperationException("hasOneof() is not implemented.");
|
||||
}
|
||||
|
||||
/** TODO(jieluo): Clear it when all subclasses have implemented this method. */
|
||||
@Override
|
||||
public FieldDescriptor getOneofFieldDescriptor(OneofDescriptor oneof) {
|
||||
throw new UnsupportedOperationException(
|
||||
"getOneofFieldDescriptor() is not implemented.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public final String toString() {
|
||||
return TextFormat.printToString(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(final CodedOutputStream output) throws IOException {
|
||||
MessageReflection.writeMessageTo(this, getAllFields(), output, false);
|
||||
}
|
||||
|
||||
protected int memoizedSize = -1;
|
||||
|
||||
@Override
|
||||
public int getSerializedSize() {
|
||||
int size = memoizedSize;
|
||||
if (size != -1) {
|
||||
return size;
|
||||
}
|
||||
|
||||
memoizedSize = MessageReflection.getSerializedSize(this, getAllFields());
|
||||
return memoizedSize;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object other) {
|
||||
if (other == this) {
|
||||
return true;
|
||||
}
|
||||
if (!(other instanceof Message)) {
|
||||
return false;
|
||||
}
|
||||
final Message otherMessage = (Message) other;
|
||||
if (getDescriptorForType() != otherMessage.getDescriptorForType()) {
|
||||
return false;
|
||||
}
|
||||
return compareFields(getAllFields(), otherMessage.getAllFields()) &&
|
||||
getUnknownFields().equals(otherMessage.getUnknownFields());
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int hash = memoizedHashCode;
|
||||
if (hash == 0) {
|
||||
hash = 41;
|
||||
hash = (19 * hash) + getDescriptorForType().hashCode();
|
||||
hash = hashFields(hash, getAllFields());
|
||||
hash = (29 * hash) + getUnknownFields().hashCode();
|
||||
memoizedHashCode = hash;
|
||||
}
|
||||
return hash;
|
||||
}
|
||||
|
||||
private static ByteString toByteString(Object value) {
|
||||
if (value instanceof byte[]) {
|
||||
return ByteString.copyFrom((byte[]) value);
|
||||
} else {
|
||||
return (ByteString) value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Compares two bytes fields. The parameters must be either a byte array or a
|
||||
* ByteString object. They can be of different type though.
|
||||
*/
|
||||
private static boolean compareBytes(Object a, Object b) {
|
||||
if (a instanceof byte[] && b instanceof byte[]) {
|
||||
return Arrays.equals((byte[])a, (byte[])b);
|
||||
}
|
||||
return toByteString(a).equals(toByteString(b));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a list of MapEntry messages into a Map used for equals() and
|
||||
* hashCode().
|
||||
*/
|
||||
@SuppressWarnings({"rawtypes", "unchecked"})
|
||||
private static Map convertMapEntryListToMap(List list) {
|
||||
if (list.isEmpty()) {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
Map result = new HashMap();
|
||||
Iterator iterator = list.iterator();
|
||||
Message entry = (Message) iterator.next();
|
||||
Descriptors.Descriptor descriptor = entry.getDescriptorForType();
|
||||
Descriptors.FieldDescriptor key = descriptor.findFieldByName("key");
|
||||
Descriptors.FieldDescriptor value = descriptor.findFieldByName("value");
|
||||
Object fieldValue = entry.getField(value);
|
||||
if (fieldValue instanceof EnumValueDescriptor) {
|
||||
fieldValue = ((EnumValueDescriptor) fieldValue).getNumber();
|
||||
}
|
||||
result.put(entry.getField(key), fieldValue);
|
||||
while (iterator.hasNext()) {
|
||||
entry = (Message) iterator.next();
|
||||
fieldValue = entry.getField(value);
|
||||
if (fieldValue instanceof EnumValueDescriptor) {
|
||||
fieldValue = ((EnumValueDescriptor) fieldValue).getNumber();
|
||||
}
|
||||
result.put(entry.getField(key), fieldValue);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compares two map fields. The parameters must be a list of MapEntry
|
||||
* messages.
|
||||
*/
|
||||
@SuppressWarnings({"rawtypes", "unchecked"})
|
||||
private static boolean compareMapField(Object a, Object b) {
|
||||
Map ma = convertMapEntryListToMap((List) a);
|
||||
Map mb = convertMapEntryListToMap((List) b);
|
||||
return MapFieldLite.equals(ma, mb);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compares two set of fields.
|
||||
* This method is used to implement {@link AbstractMessage#equals(Object)}
|
||||
* and {@link AbstractMutableMessage#equals(Object)}. It takes special care
|
||||
* of bytes fields because immutable messages and mutable messages use
|
||||
* different Java type to reprensent a bytes field and this method should be
|
||||
* able to compare immutable messages, mutable messages and also an immutable
|
||||
* message to a mutable message.
|
||||
*/
|
||||
static boolean compareFields(Map<FieldDescriptor, Object> a,
|
||||
Map<FieldDescriptor, Object> b) {
|
||||
if (a.size() != b.size()) {
|
||||
return false;
|
||||
}
|
||||
for (FieldDescriptor descriptor : a.keySet()) {
|
||||
if (!b.containsKey(descriptor)) {
|
||||
return false;
|
||||
}
|
||||
Object value1 = a.get(descriptor);
|
||||
Object value2 = b.get(descriptor);
|
||||
if (descriptor.getType() == FieldDescriptor.Type.BYTES) {
|
||||
if (descriptor.isRepeated()) {
|
||||
List list1 = (List) value1;
|
||||
List list2 = (List) value2;
|
||||
if (list1.size() != list2.size()) {
|
||||
return false;
|
||||
}
|
||||
for (int i = 0; i < list1.size(); i++) {
|
||||
if (!compareBytes(list1.get(i), list2.get(i))) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Compares a singular bytes field.
|
||||
if (!compareBytes(value1, value2)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
} else if (descriptor.isMapField()) {
|
||||
if (!compareMapField(value1, value2)) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
// Compare non-bytes fields.
|
||||
if (!value1.equals(value2)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates the hash code of a map field. {@code value} must be a list of
|
||||
* MapEntry messages.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
private static int hashMapField(Object value) {
|
||||
return MapFieldLite.calculateHashCodeForMap(convertMapEntryListToMap((List) value));
|
||||
}
|
||||
|
||||
/** Get a hash code for given fields and values, using the given seed. */
|
||||
@SuppressWarnings("unchecked")
|
||||
protected static int hashFields(int hash, Map<FieldDescriptor, Object> map) {
|
||||
for (Map.Entry<FieldDescriptor, Object> entry : map.entrySet()) {
|
||||
FieldDescriptor field = entry.getKey();
|
||||
Object value = entry.getValue();
|
||||
hash = (37 * hash) + field.getNumber();
|
||||
if (field.isMapField()) {
|
||||
hash = (53 * hash) + hashMapField(value);
|
||||
} else if (field.getType() != FieldDescriptor.Type.ENUM){
|
||||
hash = (53 * hash) + value.hashCode();
|
||||
} else if (field.isRepeated()) {
|
||||
List<? extends EnumLite> list = (List<? extends EnumLite>) value;
|
||||
hash = (53 * hash) + Internal.hashEnumList(list);
|
||||
} else {
|
||||
hash = (53 * hash) + Internal.hashEnum((EnumLite) value);
|
||||
}
|
||||
}
|
||||
return hash;
|
||||
}

  /**
   * Package private helper method for AbstractParser to create
   * UninitializedMessageException with missing field information.
   */
  @Override
  UninitializedMessageException newUninitializedMessageException() {
    return Builder.newUninitializedMessageException(this);
  }

  // =================================================================

  /**
   * A partial implementation of the {@link Message.Builder} interface which
   * implements as many methods of that interface as possible in terms of
   * other methods.
   */
  @SuppressWarnings("unchecked")
  public static abstract class Builder<BuilderType extends Builder<BuilderType>>
      extends AbstractMessageLite.Builder
      implements Message.Builder {
    // The compiler produces an error if this is not declared explicitly.
    @Override
    public abstract BuilderType clone();

    /** TODO(jieluo): Clear it when all subclasses have implemented this method. */
    @Override
    public boolean hasOneof(OneofDescriptor oneof) {
      throw new UnsupportedOperationException("hasOneof() is not implemented.");
    }

    /** TODO(jieluo): Clear it when all subclasses have implemented this method. */
    @Override
    public FieldDescriptor getOneofFieldDescriptor(OneofDescriptor oneof) {
      throw new UnsupportedOperationException(
          "getOneofFieldDescriptor() is not implemented.");
    }

    /** TODO(jieluo): Clear it when all subclasses have implemented this method. */
    @Override
    public BuilderType clearOneof(OneofDescriptor oneof) {
      throw new UnsupportedOperationException("clearOneof() is not implemented.");
    }

    @Override
    public BuilderType clear() {
      for (final Map.Entry<FieldDescriptor, Object> entry :
          getAllFields().entrySet()) {
        clearField(entry.getKey());
      }
      return (BuilderType) this;
    }

    @Override
    public List<String> findInitializationErrors() {
      return MessageReflection.findMissingFields(this);
    }

    @Override
    public String getInitializationErrorString() {
      return MessageReflection.delimitWithCommas(findInitializationErrors());
    }

    @Override
    protected BuilderType internalMergeFrom(AbstractMessageLite other) {
      return mergeFrom((Message) other);
    }

    @Override
    public BuilderType mergeFrom(final Message other) {
      if (other.getDescriptorForType() != getDescriptorForType()) {
        throw new IllegalArgumentException(
            "mergeFrom(Message) can only merge messages of the same type.");
      }

      // Note: We don't attempt to verify that other's fields have valid
      // types. Doing so would be a losing battle. We'd have to verify
      // all sub-messages as well, and we'd have to make copies of all of
      // them to ensure that they don't change after verification (since
      // the Message interface itself cannot enforce immutability of
      // implementations).
      // TODO(kenton): Provide a function somewhere called makeDeepCopy()
      // which allows people to make secure deep copies of messages.

      for (final Map.Entry<FieldDescriptor, Object> entry :
          other.getAllFields().entrySet()) {
        final FieldDescriptor field = entry.getKey();
        if (field.isRepeated()) {
          for (final Object element : (List) entry.getValue()) {
            addRepeatedField(field, element);
          }
        } else if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
          final Message existingValue = (Message) getField(field);
          if (existingValue == existingValue.getDefaultInstanceForType()) {
            setField(field, entry.getValue());
          } else {
            setField(field,
                existingValue.newBuilderForType()
                    .mergeFrom(existingValue)
                    .mergeFrom((Message) entry.getValue())
                    .build());
          }
        } else {
          setField(field, entry.getValue());
        }
      }

      mergeUnknownFields(other.getUnknownFields());

      return (BuilderType) this;
    }
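
    // Editorial sketch, not part of the original file: the merge rules above in
    // miniature. For a hypothetical generated type Foo with a repeated field
    // "tags" and a singular string "name" (all names invented for illustration):
    //
    //   Foo base    = Foo.newBuilder().addTags("a").setName("old").build();
    //   Foo overlay = Foo.newBuilder().addTags("b").setName("new").build();
    //   Foo merged  = base.toBuilder().mergeFrom(overlay).build();
    //   // merged.getTagsList() -> ["a", "b"]   (repeated fields concatenate)
    //   // merged.getName()     -> "new"        (set scalars overwrite)
    //
    // Singular message-typed fields take the third branch above: they are
    // merged recursively rather than replaced wholesale.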

    @Override
    public BuilderType mergeFrom(final CodedInputStream input)
        throws IOException {
      return mergeFrom(input, ExtensionRegistry.getEmptyRegistry());
    }

    @Override
    public BuilderType mergeFrom(
        final CodedInputStream input,
        final ExtensionRegistryLite extensionRegistry)
        throws IOException {
      final UnknownFieldSet.Builder unknownFields =
          UnknownFieldSet.newBuilder(getUnknownFields());
      while (true) {
        final int tag = input.readTag();
        if (tag == 0) {
          break;
        }

        MessageReflection.BuilderAdapter builderAdapter =
            new MessageReflection.BuilderAdapter(this);
        if (!MessageReflection.mergeFieldFrom(input, unknownFields,
                                              extensionRegistry,
                                              getDescriptorForType(),
                                              builderAdapter,
                                              tag)) {
          // end group tag
          break;
        }
      }
      setUnknownFields(unknownFields.build());
      return (BuilderType) this;
    }

    @Override
    public BuilderType mergeUnknownFields(final UnknownFieldSet unknownFields) {
      setUnknownFields(
          UnknownFieldSet.newBuilder(getUnknownFields())
              .mergeFrom(unknownFields)
              .build());
      return (BuilderType) this;
    }

    @Override
    public Message.Builder getFieldBuilder(final FieldDescriptor field) {
      throw new UnsupportedOperationException(
          "getFieldBuilder() called on an unsupported message type.");
    }

    @Override
    public Message.Builder getRepeatedFieldBuilder(final FieldDescriptor field, int index) {
      throw new UnsupportedOperationException(
          "getRepeatedFieldBuilder() called on an unsupported message type.");
    }

    @Override
    public String toString() {
      return TextFormat.printToString(this);
    }

    /**
     * Construct an UninitializedMessageException reporting missing fields in
     * the given message.
     */
    protected static UninitializedMessageException
        newUninitializedMessageException(Message message) {
      return new UninitializedMessageException(
          MessageReflection.findMissingFields(message));
    }

    /**
     * Used to support nested builders and called to mark this builder as clean.
     * Clean builders will propagate the {@link BuilderParent#markDirty()} event
     * to their parent builders, while dirty builders will not, as their parents
     * should be dirty already.
     *
     * NOTE: Implementations that don't support nested builders don't need to
     * override this method.
     */
    void markClean() {
      throw new IllegalStateException("Should be overridden by subclasses.");
    }

    /**
     * Used to support nested builders and called when this nested builder is
     * no longer used by its parent builder and should release the reference
     * to its parent builder.
     *
     * NOTE: Implementations that don't support nested builders don't need to
     * override this method.
     */
    void dispose() {
      throw new IllegalStateException("Should be overridden by subclasses.");
    }

    // ===============================================================
    // The following definitions seem to be required in order to make javac
    // not produce weird errors like:
    //
    // java/org.apache.hadoop.hbase.shaded.com.google.protobuf/DynamicMessage.java:203: types
    // org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessage.Builder<
    // org.apache.hadoop.hbase.shaded.com.google.protobuf.DynamicMessage.Builder> and
    // org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessage.Builder<
    // org.apache.hadoop.hbase.shaded.com.google.protobuf.DynamicMessage.Builder> are incompatible; both
    // define mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString), but with unrelated
    // return types.
    //
    // Strangely, these lines are only needed if javac is invoked separately
    // on AbstractMessage.java and AbstractMessageLite.java. If javac is
    // invoked on both simultaneously, it works. (Or maybe the important
    // point is whether or not DynamicMessage.java is compiled together with
    // AbstractMessageLite.java -- not sure.) I suspect this is a compiler
    // bug.

    @Override
    public BuilderType mergeFrom(final ByteString data)
        throws InvalidProtocolBufferException {
      return (BuilderType) super.mergeFrom(data);
    }

    @Override
    public BuilderType mergeFrom(
        final ByteString data,
        final ExtensionRegistryLite extensionRegistry)
        throws InvalidProtocolBufferException {
      return (BuilderType) super.mergeFrom(data, extensionRegistry);
    }

    @Override
    public BuilderType mergeFrom(final byte[] data)
        throws InvalidProtocolBufferException {
      return (BuilderType) super.mergeFrom(data);
    }

    @Override
    public BuilderType mergeFrom(
        final byte[] data, final int off, final int len)
        throws InvalidProtocolBufferException {
      return (BuilderType) super.mergeFrom(data, off, len);
    }

    @Override
    public BuilderType mergeFrom(
        final byte[] data,
        final ExtensionRegistryLite extensionRegistry)
        throws InvalidProtocolBufferException {
      return (BuilderType) super.mergeFrom(data, extensionRegistry);
    }

    @Override
    public BuilderType mergeFrom(
        final byte[] data, final int off, final int len,
        final ExtensionRegistryLite extensionRegistry)
        throws InvalidProtocolBufferException {
      return (BuilderType) super.mergeFrom(data, off, len, extensionRegistry);
    }

    @Override
    public BuilderType mergeFrom(final InputStream input)
        throws IOException {
      return (BuilderType) super.mergeFrom(input);
    }

    @Override
    public BuilderType mergeFrom(
        final InputStream input,
        final ExtensionRegistryLite extensionRegistry)
        throws IOException {
      return (BuilderType) super.mergeFrom(input, extensionRegistry);
    }

    @Override
    public boolean mergeDelimitedFrom(final InputStream input)
        throws IOException {
      return super.mergeDelimitedFrom(input);
    }

    @Override
    public boolean mergeDelimitedFrom(
        final InputStream input,
        final ExtensionRegistryLite extensionRegistry)
        throws IOException {
      return super.mergeDelimitedFrom(input, extensionRegistry);
    }
  }

  /**
   * @deprecated from v3.0.0-beta-3+, for compatibility with v2.5.0 and v2.6.1
   * generated code.
   */
  @Deprecated
  protected static int hashLong(long n) {
    return (int) (n ^ (n >>> 32));
  }
  //
  /**
   * @deprecated from v3.0.0-beta-3+, for compatibility with v2.5.0 and v2.6.1
   * generated code.
   */
  @Deprecated
  protected static int hashBoolean(boolean b) {
    return b ? 1231 : 1237;
  }
  //
  /**
   * @deprecated from v3.0.0-beta-3+, for compatibility with v2.5.0 and v2.6.1
   * generated code.
   */
  @Deprecated
  protected static int hashEnum(EnumLite e) {
    return e.getNumber();
  }
  //
  /**
   * @deprecated from v3.0.0-beta-3+, for compatibility with v2.5.0 and v2.6.1
   * generated code.
   */
  @Deprecated
  protected static int hashEnumList(List<? extends EnumLite> list) {
    int hash = 1;
    for (EnumLite e : list) {
      hash = 31 * hash + hashEnum(e);
    }
    return hash;
  }
}
@@ -1,383 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collection;

/**
 * A partial implementation of the {@link MessageLite} interface which
 * implements as many methods of that interface as possible in terms of other
 * methods.
 *
 * @author kenton@google.com Kenton Varda
 */
public abstract class AbstractMessageLite<
    MessageType extends AbstractMessageLite<MessageType, BuilderType>,
    BuilderType extends AbstractMessageLite.Builder<MessageType, BuilderType>>
        implements MessageLite {
  protected int memoizedHashCode = 0;
  @Override
  public ByteString toByteString() {
    try {
      final ByteString.CodedBuilder out =
          ByteString.newCodedBuilder(getSerializedSize());
      writeTo(out.getCodedOutput());
      return out.build();
    } catch (IOException e) {
      throw new RuntimeException(getSerializingExceptionMessage("ByteString"), e);
    }
  }

  @Override
  public byte[] toByteArray() {
    try {
      final byte[] result = new byte[getSerializedSize()];
      final CodedOutputStream output = CodedOutputStream.newInstance(result);
      writeTo(output);
      output.checkNoSpaceLeft();
      return result;
    } catch (IOException e) {
      throw new RuntimeException(getSerializingExceptionMessage("byte array"), e);
    }
  }

  @Override
  public void writeTo(final OutputStream output) throws IOException {
    final int bufferSize =
        CodedOutputStream.computePreferredBufferSize(getSerializedSize());
    final CodedOutputStream codedOutput =
        CodedOutputStream.newInstance(output, bufferSize);
    writeTo(codedOutput);
    codedOutput.flush();
  }

  @Override
  public void writeDelimitedTo(final OutputStream output) throws IOException {
    final int serialized = getSerializedSize();
    final int bufferSize = CodedOutputStream.computePreferredBufferSize(
        CodedOutputStream.computeRawVarint32Size(serialized) + serialized);
    final CodedOutputStream codedOutput =
        CodedOutputStream.newInstance(output, bufferSize);
    codedOutput.writeRawVarint32(serialized);
    writeTo(codedOutput);
    codedOutput.flush();
  }
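
  // Editorial sketch, not part of the original file: writeDelimitedTo() above
  // frames a message as [varint length][message bytes], so several messages
  // can be appended to one stream and read back one frame at a time with
  // mergeDelimitedFrom()/parseDelimitedFrom(). A round trip under that
  // assumption (msg1, msg2 and builder are hypothetical names):
  //
  //   ByteArrayOutputStream out = new ByteArrayOutputStream();
  //   msg1.writeDelimitedTo(out);       // varint(len1) + body1
  //   msg2.writeDelimitedTo(out);       // varint(len2) + body2
  //   InputStream in = new ByteArrayInputStream(out.toByteArray());
  //   builder.mergeDelimitedFrom(in);   // consumes exactly one frame
  //   builder.mergeDelimitedFrom(in);   // then the next
  //
  // The length prefix is what lets LimitedInputStream (defined further down)
  // stop each parse at the frame boundary.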

  /**
   * Package private helper method for AbstractParser to create
   * UninitializedMessageException.
   */
  UninitializedMessageException newUninitializedMessageException() {
    return new UninitializedMessageException(this);
  }

  private String getSerializingExceptionMessage(String target) {
    return "Serializing " + getClass().getName() + " to a " + target
        + " threw an IOException (should never happen).";
  }

  protected static void checkByteStringIsUtf8(ByteString byteString)
      throws IllegalArgumentException {
    if (!byteString.isValidUtf8()) {
      throw new IllegalArgumentException("Byte string is not UTF-8.");
    }
  }

  protected static <T> void addAll(final Iterable<T> values,
      final Collection<? super T> list) {
    Builder.addAll(values, list);
  }

  /**
   * A partial implementation of the {@link Message.Builder} interface which
   * implements as many methods of that interface as possible in terms of
   * other methods.
   */
  @SuppressWarnings("unchecked")
  public abstract static class Builder<
      MessageType extends AbstractMessageLite<MessageType, BuilderType>,
      BuilderType extends Builder<MessageType, BuilderType>>
      implements MessageLite.Builder {
    // The compiler produces an error if this is not declared explicitly.
    @Override
    public abstract BuilderType clone();

    @Override
    public BuilderType mergeFrom(final CodedInputStream input) throws IOException {
      return mergeFrom(input, ExtensionRegistryLite.getEmptyRegistry());
    }

    // Re-defined here for return type covariance.
    @Override
    public abstract BuilderType mergeFrom(
        final CodedInputStream input, final ExtensionRegistryLite extensionRegistry)
        throws IOException;

    @Override
    public BuilderType mergeFrom(final ByteString data) throws InvalidProtocolBufferException {
      try {
        final CodedInputStream input = data.newCodedInput();
        mergeFrom(input);
        input.checkLastTagWas(0);
        return (BuilderType) this;
      } catch (InvalidProtocolBufferException e) {
        throw e;
      } catch (IOException e) {
        throw new RuntimeException(getReadingExceptionMessage("ByteString"), e);
      }
    }

    @Override
    public BuilderType mergeFrom(
        final ByteString data, final ExtensionRegistryLite extensionRegistry)
        throws InvalidProtocolBufferException {
      try {
        final CodedInputStream input = data.newCodedInput();
        mergeFrom(input, extensionRegistry);
        input.checkLastTagWas(0);
        return (BuilderType) this;
      } catch (InvalidProtocolBufferException e) {
        throw e;
      } catch (IOException e) {
        throw new RuntimeException(getReadingExceptionMessage("ByteString"), e);
      }
    }

    @Override
    public BuilderType mergeFrom(final byte[] data) throws InvalidProtocolBufferException {
      return mergeFrom(data, 0, data.length);
    }

    @Override
    public BuilderType mergeFrom(final byte[] data, final int off, final int len)
        throws InvalidProtocolBufferException {
      try {
        final CodedInputStream input =
            CodedInputStream.newInstance(data, off, len);
        mergeFrom(input);
        input.checkLastTagWas(0);
        return (BuilderType) this;
      } catch (InvalidProtocolBufferException e) {
        throw e;
      } catch (IOException e) {
        throw new RuntimeException(getReadingExceptionMessage("byte array"), e);
      }
    }

    @Override
    public BuilderType mergeFrom(final byte[] data, final ExtensionRegistryLite extensionRegistry)
        throws InvalidProtocolBufferException {
      return mergeFrom(data, 0, data.length, extensionRegistry);
    }

    @Override
    public BuilderType mergeFrom(
        final byte[] data,
        final int off,
        final int len,
        final ExtensionRegistryLite extensionRegistry)
        throws InvalidProtocolBufferException {
      try {
        final CodedInputStream input =
            CodedInputStream.newInstance(data, off, len);
        mergeFrom(input, extensionRegistry);
        input.checkLastTagWas(0);
        return (BuilderType) this;
      } catch (InvalidProtocolBufferException e) {
        throw e;
      } catch (IOException e) {
        throw new RuntimeException(getReadingExceptionMessage("byte array"), e);
      }
    }

    @Override
    public BuilderType mergeFrom(final InputStream input) throws IOException {
      final CodedInputStream codedInput = CodedInputStream.newInstance(input);
      mergeFrom(codedInput);
      codedInput.checkLastTagWas(0);
      return (BuilderType) this;
    }

    @Override
    public BuilderType mergeFrom(
        final InputStream input, final ExtensionRegistryLite extensionRegistry) throws IOException {
      final CodedInputStream codedInput = CodedInputStream.newInstance(input);
      mergeFrom(codedInput, extensionRegistry);
      codedInput.checkLastTagWas(0);
      return (BuilderType) this;
    }

    /**
     * An InputStream implementation which reads from some other InputStream
     * but is limited to a particular number of bytes. Used by
     * mergeDelimitedFrom(). This is intentionally package-private so that
     * UnknownFieldSet can share it.
     */
    static final class LimitedInputStream extends FilterInputStream {
      private int limit;

      LimitedInputStream(InputStream in, int limit) {
        super(in);
        this.limit = limit;
      }

      @Override
      public int available() throws IOException {
        return Math.min(super.available(), limit);
      }

      @Override
      public int read() throws IOException {
        if (limit <= 0) {
          return -1;
        }
        final int result = super.read();
        if (result >= 0) {
          --limit;
        }
        return result;
      }

      @Override
      public int read(final byte[] b, final int off, int len)
          throws IOException {
        if (limit <= 0) {
          return -1;
        }
        len = Math.min(len, limit);
        final int result = super.read(b, off, len);
        if (result >= 0) {
          limit -= result;
        }
        return result;
      }

      @Override
      public long skip(final long n) throws IOException {
        final long result = super.skip(Math.min(n, limit));
        if (result >= 0) {
          limit -= result;
        }
        return result;
      }
    }

    @Override
    public boolean mergeDelimitedFrom(
        final InputStream input, final ExtensionRegistryLite extensionRegistry) throws IOException {
      final int firstByte = input.read();
      if (firstByte == -1) {
        return false;
      }
      final int size = CodedInputStream.readRawVarint32(firstByte, input);
      final InputStream limitedInput = new LimitedInputStream(input, size);
      mergeFrom(limitedInput, extensionRegistry);
      return true;
    }

    @Override
    public boolean mergeDelimitedFrom(final InputStream input) throws IOException {
      return mergeDelimitedFrom(input,
          ExtensionRegistryLite.getEmptyRegistry());
    }

    @Override
    @SuppressWarnings("unchecked") // isInstance takes care of this
    public BuilderType mergeFrom(final MessageLite other) {
      if (!getDefaultInstanceForType().getClass().isInstance(other)) {
        throw new IllegalArgumentException(
            "mergeFrom(MessageLite) can only merge messages of the same type.");
      }

      return internalMergeFrom((MessageType) other);
    }

    protected abstract BuilderType internalMergeFrom(MessageType message);

    private String getReadingExceptionMessage(String target) {
      return "Reading " + getClass().getName() + " from a " + target
          + " threw an IOException (should never happen).";
    }

    /**
     * Construct an UninitializedMessageException reporting missing fields in
     * the given message.
     */
    protected static UninitializedMessageException
        newUninitializedMessageException(MessageLite message) {
      return new UninitializedMessageException(message);
    }

    /**
     * Adds the {@code values} to the {@code list}. This is a helper method
     * used by generated code. Users should ignore it.
     *
     * @throws NullPointerException if {@code values} or any of the elements of
     * {@code values} is null. When that happens, some elements of
     * {@code values} may have already been added to the result {@code list}.
     */
    protected static <T> void addAll(final Iterable<T> values,
        final Collection<? super T> list) {
      if (values == null) {
        throw new NullPointerException();
      }
      if (values instanceof LazyStringList) {
        // For StringOrByteStringLists, check the underlying elements to avoid
        // forcing conversions of ByteStrings to Strings.
        checkForNullValues(((LazyStringList) values).getUnderlyingElements());
        list.addAll((Collection<T>) values);
      } else if (values instanceof Collection) {
        checkForNullValues(values);
        list.addAll((Collection<T>) values);
      } else {
        for (final T value : values) {
          if (value == null) {
            throw new NullPointerException();
          }
          list.add(value);
        }
      }
    }

    private static void checkForNullValues(final Iterable<?> values) {
      for (final Object value : values) {
        if (value == null) {
          throw new NullPointerException();
        }
      }
    }
  }
}
@@ -1,258 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.LimitedInputStream;

import java.io.IOException;
import java.io.InputStream;

/**
 * A partial implementation of the {@link Parser} interface which implements
 * as many methods of that interface as possible in terms of other methods.
 *
 * Note: This class implements all the convenience methods in the
 * {@link Parser} interface. See {@link Parser} for related javadocs.
 * Subclasses need to implement
 * {@link Parser#parsePartialFrom(CodedInputStream, ExtensionRegistryLite)}
 *
 * @author liujisi@google.com (Pherl Liu)
 */
public abstract class AbstractParser<MessageType extends MessageLite>
    implements Parser<MessageType> {
  /**
   * Creates an UninitializedMessageException for MessageType.
   */
  private UninitializedMessageException
      newUninitializedMessageException(MessageType message) {
    if (message instanceof AbstractMessageLite) {
      return ((AbstractMessageLite) message).newUninitializedMessageException();
    }
    return new UninitializedMessageException(message);
  }

  /**
   * Helper method to check if message is initialized.
   *
   * @throws InvalidProtocolBufferException if it is not initialized.
   * @return The message to check.
   */
  private MessageType checkMessageInitialized(MessageType message)
      throws InvalidProtocolBufferException {
    if (message != null && !message.isInitialized()) {
      throw newUninitializedMessageException(message)
          .asInvalidProtocolBufferException()
          .setUnfinishedMessage(message);
    }
    return message;
  }
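
  // Editorial sketch, not part of the original file: every parseFrom()
  // overload below is the matching parsePartialFrom() plus the initialization
  // check above, so the two families differ only in how missing required
  // fields are treated. Under that assumption (Foo, parser and bytes are
  // invented names for illustration):
  //
  //   Foo a = parser.parsePartialFrom(bytes); // may return a message with
  //                                           // required fields still unset
  //   Foo b = parser.parseFrom(bytes);        // same bytes, but throws
  //       // InvalidProtocolBufferException (via checkMessageInitialized)
  //       // if any required field is missing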

  private static final ExtensionRegistryLite EMPTY_REGISTRY
      = ExtensionRegistryLite.getEmptyRegistry();

  @Override
  public MessageType parsePartialFrom(CodedInputStream input)
      throws InvalidProtocolBufferException {
    return parsePartialFrom(input, EMPTY_REGISTRY);
  }

  @Override
  public MessageType parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
      throws InvalidProtocolBufferException {
    return checkMessageInitialized(
        parsePartialFrom(input, extensionRegistry));
  }

  @Override
  public MessageType parseFrom(CodedInputStream input) throws InvalidProtocolBufferException {
    return parseFrom(input, EMPTY_REGISTRY);
  }

  @Override
  public MessageType parsePartialFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
      throws InvalidProtocolBufferException {
    MessageType message;
    try {
      CodedInputStream input = data.newCodedInput();
      message = parsePartialFrom(input, extensionRegistry);
      try {
        input.checkLastTagWas(0);
      } catch (InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(message);
      }
      return message;
    } catch (InvalidProtocolBufferException e) {
      throw e;
    }
  }

  @Override
  public MessageType parsePartialFrom(ByteString data) throws InvalidProtocolBufferException {
    return parsePartialFrom(data, EMPTY_REGISTRY);
  }

  @Override
  public MessageType parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
      throws InvalidProtocolBufferException {
    return checkMessageInitialized(parsePartialFrom(data, extensionRegistry));
  }

  @Override
  public MessageType parseFrom(ByteString data) throws InvalidProtocolBufferException {
    return parseFrom(data, EMPTY_REGISTRY);
  }

  @Override
  public MessageType parsePartialFrom(
      byte[] data, int off, int len, ExtensionRegistryLite extensionRegistry)
      throws InvalidProtocolBufferException {
    try {
      CodedInputStream input = CodedInputStream.newInstance(data, off, len);
      MessageType message = parsePartialFrom(input, extensionRegistry);
      try {
        input.checkLastTagWas(0);
      } catch (InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(message);
      }
      return message;
    } catch (InvalidProtocolBufferException e) {
      throw e;
    }
  }

  @Override
  public MessageType parsePartialFrom(byte[] data, int off, int len)
      throws InvalidProtocolBufferException {
    return parsePartialFrom(data, off, len, EMPTY_REGISTRY);
  }

  @Override
  public MessageType parsePartialFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
      throws InvalidProtocolBufferException {
    return parsePartialFrom(data, 0, data.length, extensionRegistry);
  }

  @Override
  public MessageType parsePartialFrom(byte[] data) throws InvalidProtocolBufferException {
    return parsePartialFrom(data, 0, data.length, EMPTY_REGISTRY);
  }

  @Override
  public MessageType parseFrom(
      byte[] data, int off, int len, ExtensionRegistryLite extensionRegistry)
      throws InvalidProtocolBufferException {
    return checkMessageInitialized(
        parsePartialFrom(data, off, len, extensionRegistry));
  }

  @Override
  public MessageType parseFrom(byte[] data, int off, int len)
      throws InvalidProtocolBufferException {
    return parseFrom(data, off, len, EMPTY_REGISTRY);
  }

  @Override
  public MessageType parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
      throws InvalidProtocolBufferException {
    return parseFrom(data, 0, data.length, extensionRegistry);
  }

  @Override
  public MessageType parseFrom(byte[] data) throws InvalidProtocolBufferException {
    return parseFrom(data, EMPTY_REGISTRY);
  }

  @Override
  public MessageType parsePartialFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
      throws InvalidProtocolBufferException {
    CodedInputStream codedInput = CodedInputStream.newInstance(input);
    MessageType message = parsePartialFrom(codedInput, extensionRegistry);
    try {
      codedInput.checkLastTagWas(0);
    } catch (InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(message);
    }
    return message;
  }

  @Override
  public MessageType parsePartialFrom(InputStream input) throws InvalidProtocolBufferException {
    return parsePartialFrom(input, EMPTY_REGISTRY);
  }

  @Override
  public MessageType parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
      throws InvalidProtocolBufferException {
    return checkMessageInitialized(
        parsePartialFrom(input, extensionRegistry));
  }

  @Override
  public MessageType parseFrom(InputStream input) throws InvalidProtocolBufferException {
    return parseFrom(input, EMPTY_REGISTRY);
  }

  @Override
  public MessageType parsePartialDelimitedFrom(
      InputStream input, ExtensionRegistryLite extensionRegistry)
      throws InvalidProtocolBufferException {
    int size;
    try {
      int firstByte = input.read();
      if (firstByte == -1) {
        return null;
      }
      size = CodedInputStream.readRawVarint32(firstByte, input);
    } catch (IOException e) {
      throw new InvalidProtocolBufferException(e);
    }
    InputStream limitedInput = new LimitedInputStream(input, size);
    return parsePartialFrom(limitedInput, extensionRegistry);
  }

  @Override
  public MessageType parsePartialDelimitedFrom(InputStream input)
      throws InvalidProtocolBufferException {
    return parsePartialDelimitedFrom(input, EMPTY_REGISTRY);
  }

  @Override
  public MessageType parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
      throws InvalidProtocolBufferException {
    return checkMessageInitialized(
        parsePartialDelimitedFrom(input, extensionRegistry));
  }

  @Override
  public MessageType parseDelimitedFrom(InputStream input) throws InvalidProtocolBufferException {
    return parseDelimitedFrom(input, EMPTY_REGISTRY);
  }
}
@@ -1,180 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.ProtobufList;

import java.util.AbstractList;
import java.util.Collection;
import java.util.List;
import java.util.RandomAccess;

/**
 * An abstract implementation of {@link ProtobufList} which manages mutability semantics. All mutate
 * methods must check if the list is mutable before proceeding. Subclasses must invoke
 * {@link #ensureIsMutable()} manually when overriding those methods.
 * <p>
 * This implementation assumes all subclasses are array based, supporting random access.
 */
abstract class AbstractProtobufList<E> extends AbstractList<E> implements ProtobufList<E> {

  protected static final int DEFAULT_CAPACITY = 10;

  /**
   * Whether or not this list is modifiable.
   */
  private boolean isMutable;

  /**
   * Constructs a mutable list by default.
   */
  AbstractProtobufList() {
    isMutable = true;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof List)) {
      return false;
    }
    // Handle lists that do not support RandomAccess as efficiently as possible by using an iterator
    // based approach in our super class. Otherwise our index based approach will avoid those
    // allocations.
    if (!(o instanceof RandomAccess)) {
      return super.equals(o);
    }

    List<?> other = (List<?>) o;
    final int size = size();
    if (size != other.size()) {
      return false;
    }
    for (int i = 0; i < size; i++) {
      if (!get(i).equals(other.get(i))) {
        return false;
      }
    }
    return true;
  }

  @Override
  public int hashCode() {
    final int size = size();
    int hashCode = 1;
    for (int i = 0; i < size; i++) {
      hashCode = (31 * hashCode) + get(i).hashCode();
    }
    return hashCode;
  }

  @Override
  public boolean add(E e) {
    ensureIsMutable();
    return super.add(e);
  }

  @Override
  public void add(int index, E element) {
    ensureIsMutable();
    super.add(index, element);
  }

  @Override
  public boolean addAll(Collection<? extends E> c) {
    ensureIsMutable();
    return super.addAll(c);
  }

  @Override
  public boolean addAll(int index, Collection<? extends E> c) {
    ensureIsMutable();
    return super.addAll(index, c);
  }

  @Override
  public void clear() {
    ensureIsMutable();
    super.clear();
  }

  @Override
  public boolean isModifiable() {
    return isMutable;
  }

  @Override
  public final void makeImmutable() {
    isMutable = false;
  }

  @Override
  public E remove(int index) {
    ensureIsMutable();
    return super.remove(index);
  }

  @Override
  public boolean remove(Object o) {
    ensureIsMutable();
    return super.remove(o);
  }

  @Override
  public boolean removeAll(Collection<?> c) {
    ensureIsMutable();
    return super.removeAll(c);
  }

  @Override
  public boolean retainAll(Collection<?> c) {
    ensureIsMutable();
    return super.retainAll(c);
  }

  @Override
  public E set(int index, E element) {
    ensureIsMutable();
    return super.set(index, element);
  }

  /**
   * Throws an {@link UnsupportedOperationException} if the list is immutable. Subclasses are
   * responsible for invoking this method on mutate operations.
   */
  protected void ensureIsMutable() {
    if (!isMutable) {
      throw new UnsupportedOperationException();
    }
  }
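
  // Editorial sketch, not part of the original file: the mutate-then-freeze
  // lifecycle the overrides above enforce, assuming some concrete subclass
  // (the variable below is hypothetical):
  //
  //   AbstractProtobufList<String> list = ...;  // mutable on construction
  //   list.add("a");                            // ok: ensureIsMutable() passes
  //   list.makeImmutable();                     // one-way switch
  //   list.add("b");                            // throws
  //                                             // UnsupportedOperationException
  //
  // Every mutator funnels through ensureIsMutable(), so freezing the list once
  // makes all later structural modification fail fast.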
}
@@ -1,899 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/any.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

/**
 * <pre>
 * `Any` contains an arbitrary serialized protocol buffer message along with a
 * URL that describes the type of the serialized message.
 * Protobuf library provides support to pack/unpack Any values in the form
 * of utility functions or additional generated methods of the Any type.
 * Example 1: Pack and unpack a message in C++.
 *     Foo foo = ...;
 *     Any any;
 *     any.PackFrom(foo);
 *     ...
 *     if (any.UnpackTo(&foo)) {
 *       ...
 *     }
 * Example 2: Pack and unpack a message in Java.
 *     Foo foo = ...;
 *     Any any = Any.pack(foo);
 *     ...
 *     if (any.is(Foo.class)) {
 *       foo = any.unpack(Foo.class);
 *     }
 * Example 3: Pack and unpack a message in Python.
 *     foo = Foo(...)
 *     any = Any()
 *     any.Pack(foo)
 *     ...
 *     if any.Is(Foo.DESCRIPTOR):
 *       any.Unpack(foo)
 *       ...
 * The pack methods provided by protobuf library will by default use
 * 'type.googleapis.com/full.type.name' as the type URL and the unpack
 * methods only use the fully qualified type name after the last '/'
 * in the type URL, for example "foo.bar.com/x/y.z" will yield type
 * name "y.z".
 * JSON
 * ====
 * The JSON representation of an `Any` value uses the regular
 * representation of the deserialized, embedded message, with an
 * additional field `@type` which contains the type URL. Example:
 *     package google.profile;
 *     message Person {
 *       string first_name = 1;
 *       string last_name = 2;
 *     }
 *     {
 *       "@type": "type.googleapis.com/google.profile.Person",
 *       "firstName": <string>,
 *       "lastName": <string>
 *     }
 * If the embedded message type is well-known and has a custom JSON
 * representation, that representation will be embedded adding a field
 * `value` which holds the custom JSON in addition to the `@type`
 * field. Example (for message [google.protobuf.Duration][]):
 *     {
 *       "@type": "type.googleapis.org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration",
 *       "value": "1.212s"
 *     }
 * </pre>
 *
 * Protobuf type {@code google.protobuf.Any}
 */
public final class Any extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.protobuf.Any)
    AnyOrBuilder {
  // Use Any.newBuilder() to construct.
  private Any(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private Any() {
    typeUrl_ = "";
    value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
  }

  @java.lang.Override
  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  private Any(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            java.lang.String s = input.readStringRequireUtf8();

            typeUrl_ = s;
            break;
          }
          case 18: {

            value_ = input.readBytes();
            break;
          }
        }
      }
    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyProto.internal_static_google_protobuf_Any_descriptor;
  }

  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyProto.internal_static_google_protobuf_Any_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.Builder.class);
  }

  private static String getTypeUrl(
      java.lang.String typeUrlPrefix,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor descriptor) {
    return typeUrlPrefix.endsWith("/")
        ? typeUrlPrefix + descriptor.getFullName()
        : typeUrlPrefix + "/" + descriptor.getFullName();
  }

  private static String getTypeNameFromTypeUrl(
      java.lang.String typeUrl) {
    int pos = typeUrl.lastIndexOf('/');
    return pos == -1 ? "" : typeUrl.substring(pos + 1);
  }
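
  // Editorial sketch, not part of the original file: the two helpers above are
  // inverses over the default prefix. For google.protobuf.Duration:
  //
  //   getTypeUrl("type.googleapis.com", durationDescriptor)
  //       -> "type.googleapis.com/google.protobuf.Duration"
  //   getTypeNameFromTypeUrl("type.googleapis.com/google.protobuf.Duration")
  //       -> "google.protobuf.Duration"
  //
  // Only the segment after the last '/' is consulted when unpacking, matching
  // the class javadoc ("foo.bar.com/x/y.z" yields type name "y.z").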

  public static <T extends org.apache.hadoop.hbase.shaded.com.google.protobuf.Message> Any pack(
      T message) {
    return Any.newBuilder()
        .setTypeUrl(getTypeUrl("type.googleapis.com",
                               message.getDescriptorForType()))
        .setValue(message.toByteString())
        .build();
  }

  /**
   * Packs a message using the given type URL prefix. The type URL will
   * be constructed by concatenating the message type's full name to the
   * prefix with an optional "/" separator if the prefix doesn't end
   * with "/" already.
   */
  public static <T extends org.apache.hadoop.hbase.shaded.com.google.protobuf.Message> Any pack(
      T message, java.lang.String typeUrlPrefix) {
    return Any.newBuilder()
        .setTypeUrl(getTypeUrl(typeUrlPrefix,
                               message.getDescriptorForType()))
        .setValue(message.toByteString())
        .build();
  }

  public <T extends org.apache.hadoop.hbase.shaded.com.google.protobuf.Message> boolean is(
      java.lang.Class<T> clazz) {
    T defaultInstance =
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.getDefaultInstance(clazz);
    return getTypeNameFromTypeUrl(getTypeUrl()).equals(
        defaultInstance.getDescriptorForType().getFullName());
  }

  private volatile org.apache.hadoop.hbase.shaded.com.google.protobuf.Message cachedUnpackValue;

  public <T extends org.apache.hadoop.hbase.shaded.com.google.protobuf.Message> T unpack(
      java.lang.Class<T> clazz)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    if (!is(clazz)) {
      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
          "Type of the Any message does not match the given class.");
    }
    if (cachedUnpackValue != null) {
      return (T) cachedUnpackValue;
    }
    T defaultInstance =
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.getDefaultInstance(clazz);
    T result = (T) defaultInstance.getParserForType()
        .parseFrom(getValue());
    cachedUnpackValue = result;
    return result;
  }
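
  // Editorial sketch, not part of the original file: the pack()/is()/unpack()
  // round trip from the class javadoc, spelled out. Foo stands in for any
  // generated message type (hypothetical here):
  //
  //   Any any = Any.pack(foo);            // typeUrl = "type.googleapis.com/<full name>"
  //   if (any.is(Foo.class)) {            // compares type names, not contents
  //     Foo copy = any.unpack(Foo.class); // parses value_, cached for reuse
  //   }
  //
  // unpack() memoizes its result in cachedUnpackValue above, so repeated
  // unpacks of the same Any parse the payload only once.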
  public static final int TYPE_URL_FIELD_NUMBER = 1;
  private volatile java.lang.Object typeUrl_;
  /**
   * <pre>
   * A URL/resource name whose content describes the type of the
   * serialized protocol buffer message.
   * For URLs which use the scheme `http`, `https`, or no scheme, the
   * following restrictions and interpretations apply:
   * * If no scheme is provided, `https` is assumed.
   * * The last segment of the URL's path must represent the fully
   *   qualified name of the type (as in `path/google.protobuf.Duration`).
   *   The name should be in a canonical form (e.g., leading "." is
   *   not accepted).
   * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
   *   value in binary format, or produce an error.
   * * Applications are allowed to cache lookup results based on the
   *   URL, or have them precompiled into a binary to avoid any
   *   lookup. Therefore, binary compatibility needs to be preserved
   *   on changes to types. (Use versioned type names to manage
   *   breaking changes.)
   * Schemes other than `http`, `https` (or the empty scheme) might be
   * used with implementation specific semantics.
   * </pre>
   *
   * <code>string type_url = 1;</code>
   */
  public java.lang.String getTypeUrl() {
    java.lang.Object ref = typeUrl_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
          (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      typeUrl_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * A URL/resource name whose content describes the type of the
   * serialized protocol buffer message.
   * For URLs which use the scheme `http`, `https`, or no scheme, the
   * following restrictions and interpretations apply:
   * * If no scheme is provided, `https` is assumed.
   * * The last segment of the URL's path must represent the fully
   *   qualified name of the type (as in `path/google.protobuf.Duration`).
   *   The name should be in a canonical form (e.g., leading "." is
   *   not accepted).
   * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
   *   value in binary format, or produce an error.
   * * Applications are allowed to cache lookup results based on the
   *   URL, or have them precompiled into a binary to avoid any
   *   lookup. Therefore, binary compatibility needs to be preserved
   *   on changes to types. (Use versioned type names to manage
   *   breaking changes.)
   * Schemes other than `http`, `https` (or the empty scheme) might be
   * used with implementation specific semantics.
   * </pre>
   *
   * <code>string type_url = 1;</code>
   */
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getTypeUrlBytes() {
    java.lang.Object ref = typeUrl_;
    if (ref instanceof java.lang.String) {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
          org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      typeUrl_ = b;
      return b;
    } else {
      return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
    }
  }

  public static final int VALUE_FIELD_NUMBER = 2;
  private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_;
  /**
   * <pre>
   * Must be a valid serialized protocol buffer of the above specified type.
   * </pre>
   *
   * <code>bytes value = 2;</code>
   */
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() {
    return value_;
  }

  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    if (!getTypeUrlBytes().isEmpty()) {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, typeUrl_);
    }
    if (!value_.isEmpty()) {
      output.writeBytes(2, value_);
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!getTypeUrlBytes().isEmpty()) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, typeUrl_);
    }
    if (!value_.isEmpty()) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, value_);
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Any)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Any other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Any) obj;

    boolean result = true;
    result = result && getTypeUrl()
        .equals(other.getTypeUrl());
    result = result && getValue()
        .equals(other.getValue());
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + TYPE_URL_FIELD_NUMBER;
    hash = (53 * hash) + getTypeUrl().hashCode();
    hash = (37 * hash) + VALUE_FIELD_NUMBER;
    hash = (53 * hash) + getValue().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(byte[] data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(
      byte[] data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.Any prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * `Any` contains an arbitrary serialized protocol buffer message along with a
   * URL that describes the type of the serialized message.
   * Protobuf library provides support to pack/unpack Any values in the form
   * of utility functions or additional generated methods of the Any type.
   * Example 1: Pack and unpack a message in C++.
   *     Foo foo = ...;
   *     Any any;
   *     any.PackFrom(foo);
   *     ...
   *     if (any.UnpackTo(&foo)) {
|
||||
* ...
|
||||
* }
|
||||
* Example 2: Pack and unpack a message in Java.
|
||||
* Foo foo = ...;
|
||||
* Any any = Any.pack(foo);
|
||||
* ...
|
||||
* if (any.is(Foo.class)) {
|
||||
* foo = any.unpack(Foo.class);
|
||||
* }
|
||||
* Example 3: Pack and unpack a message in Python.
|
||||
* foo = Foo(...)
|
||||
* any = Any()
|
||||
* any.Pack(foo)
|
||||
* ...
|
||||
* if any.Is(Foo.DESCRIPTOR):
|
||||
* any.Unpack(foo)
|
||||
* ...
|
||||
* The pack methods provided by protobuf library will by default use
|
||||
* 'type.googleapis.com/full.type.name' as the type URL and the unpack
|
||||
* methods only use the fully qualified type name after the last '/'
|
||||
* in the type URL, for example "foo.bar.com/x/y.z" will yield type
|
||||
* name "y.z".
|
||||
* JSON
|
||||
* ====
|
||||
* The JSON representation of an `Any` value uses the regular
|
||||
* representation of the deserialized, embedded message, with an
|
||||
* additional field `@type` which contains the type URL. Example:
|
||||
* package google.profile;
|
||||
* message Person {
|
||||
* string first_name = 1;
|
||||
* string last_name = 2;
|
||||
* }
|
||||
* {
|
||||
* "@type": "type.googleapis.com/google.profile.Person",
|
||||
* "firstName": <string>,
|
||||
* "lastName": <string>
|
||||
* }
|
||||
* If the embedded message type is well-known and has a custom JSON
|
||||
* representation, that representation will be embedded adding a field
|
||||
* `value` which holds the custom JSON in addition to the `@type`
|
||||
* field. Example (for message [google.protobuf.Duration][]):
|
||||
* {
|
||||
* "@type": "type.googleapis.org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration",
|
||||
* "value": "1.212s"
|
||||
* }
|
||||
* </pre>
|
||||
*
|
||||
* Protobuf type {@code google.protobuf.Any}
|
||||
*/
|
||||
public static final class Builder extends
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
|
||||
// @@protoc_insertion_point(builder_implements:google.protobuf.Any)
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyOrBuilder {
|
||||
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyProto.internal_static_google_protobuf_Any_descriptor;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyProto.internal_static_google_protobuf_Any_fieldAccessorTable
|
||||
.ensureFieldAccessorsInitialized(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.Builder.class);
|
||||
}
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.newBuilder()
|
||||
private Builder() {
|
||||
maybeForceBuilderInitialization();
|
||||
}
|
||||
|
||||
private Builder(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
|
||||
super(parent);
|
||||
maybeForceBuilderInitialization();
|
||||
}
|
||||
private void maybeForceBuilderInitialization() {
|
||||
if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.alwaysUseFieldBuilders) {
|
||||
}
|
||||
}
|
||||
public Builder clear() {
|
||||
super.clear();
|
||||
typeUrl_ = "";
|
||||
|
||||
value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyProto.internal_static_google_protobuf_Any_descriptor;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Any getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.getDefaultInstance();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Any build() {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Any result = buildPartial();
|
||||
if (!result.isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Any buildPartial() {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Any result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Any(this);
|
||||
result.typeUrl_ = typeUrl_;
|
||||
result.value_ = value_;
|
||||
onBuilt();
|
||||
return result;
|
||||
}
|
||||
|
||||
public Builder clone() {
|
||||
return (Builder) super.clone();
|
||||
}
|
||||
public Builder setField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
|
||||
Object value) {
|
||||
return (Builder) super.setField(field, value);
|
||||
}
|
||||
public Builder clearField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
|
||||
return (Builder) super.clearField(field);
|
||||
}
|
||||
public Builder clearOneof(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
|
||||
return (Builder) super.clearOneof(oneof);
|
||||
}
|
||||
public Builder setRepeatedField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
|
||||
int index, Object value) {
|
||||
return (Builder) super.setRepeatedField(field, index, value);
|
||||
}
|
||||
public Builder addRepeatedField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
|
||||
Object value) {
|
||||
return (Builder) super.addRepeatedField(field, value);
|
||||
}
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Any) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.Any)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Any other) {
|
||||
if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.getDefaultInstance()) return this;
|
||||
if (!other.getTypeUrl().isEmpty()) {
|
||||
typeUrl_ = other.typeUrl_;
|
||||
onChanged();
|
||||
}
|
||||
if (other.getValue() != org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY) {
|
||||
setValue(other.getValue());
|
||||
}
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
|
||||
public final boolean isInitialized() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parsedMessage = null;
|
||||
try {
|
||||
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
|
||||
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
|
||||
parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Any) e.getUnfinishedMessage();
|
||||
throw e.unwrapIOException();
|
||||
} finally {
|
||||
if (parsedMessage != null) {
|
||||
mergeFrom(parsedMessage);
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
private java.lang.Object typeUrl_ = "";
|
||||
/**
|
||||
* <pre>
|
||||
* A URL/resource name whose content describes the type of the
|
||||
* serialized protocol buffer message.
|
||||
* For URLs which use the scheme `http`, `https`, or no scheme, the
|
||||
* following restrictions and interpretations apply:
|
||||
* * If no scheme is provided, `https` is assumed.
|
||||
* * The last segment of the URL's path must represent the fully
|
||||
* qualified name of the type (as in `path/google.protobuf.Duration`).
|
||||
* The name should be in a canonical form (e.g., leading "." is
|
||||
* not accepted).
|
||||
* * An HTTP GET on the URL must yield a [google.protobuf.Type][]
|
||||
* value in binary format, or produce an error.
|
||||
* * Applications are allowed to cache lookup results based on the
|
||||
* URL, or have them precompiled into a binary to avoid any
|
||||
* lookup. Therefore, binary compatibility needs to be preserved
|
||||
* on changes to types. (Use versioned type names to manage
|
||||
* breaking changes.)
|
||||
* Schemes other than `http`, `https` (or the empty scheme) might be
|
||||
* used with implementation specific semantics.
|
||||
* </pre>
|
||||
*
|
||||
* <code>string type_url = 1;</code>
|
||||
*/
|
||||
public java.lang.String getTypeUrl() {
|
||||
java.lang.Object ref = typeUrl_;
|
||||
if (!(ref instanceof java.lang.String)) {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
|
||||
(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
|
||||
java.lang.String s = bs.toStringUtf8();
|
||||
typeUrl_ = s;
|
||||
return s;
|
||||
} else {
|
||||
return (java.lang.String) ref;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* A URL/resource name whose content describes the type of the
|
||||
* serialized protocol buffer message.
|
||||
* For URLs which use the scheme `http`, `https`, or no scheme, the
|
||||
* following restrictions and interpretations apply:
|
||||
* * If no scheme is provided, `https` is assumed.
|
||||
* * The last segment of the URL's path must represent the fully
|
||||
* qualified name of the type (as in `path/google.protobuf.Duration`).
|
||||
* The name should be in a canonical form (e.g., leading "." is
|
||||
* not accepted).
|
||||
* * An HTTP GET on the URL must yield a [google.protobuf.Type][]
|
||||
* value in binary format, or produce an error.
|
||||
* * Applications are allowed to cache lookup results based on the
|
||||
* URL, or have them precompiled into a binary to avoid any
|
||||
* lookup. Therefore, binary compatibility needs to be preserved
|
||||
* on changes to types. (Use versioned type names to manage
|
||||
* breaking changes.)
|
||||
* Schemes other than `http`, `https` (or the empty scheme) might be
|
||||
* used with implementation specific semantics.
|
||||
* </pre>
|
||||
*
|
||||
* <code>string type_url = 1;</code>
|
||||
*/
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
|
||||
getTypeUrlBytes() {
|
||||
java.lang.Object ref = typeUrl_;
|
||||
if (ref instanceof String) {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
|
||||
(java.lang.String) ref);
|
||||
typeUrl_ = b;
|
||||
return b;
|
||||
} else {
|
||||
return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* A URL/resource name whose content describes the type of the
|
||||
* serialized protocol buffer message.
|
||||
* For URLs which use the scheme `http`, `https`, or no scheme, the
|
||||
* following restrictions and interpretations apply:
|
||||
* * If no scheme is provided, `https` is assumed.
|
||||
* * The last segment of the URL's path must represent the fully
|
||||
* qualified name of the type (as in `path/google.protobuf.Duration`).
|
||||
* The name should be in a canonical form (e.g., leading "." is
|
||||
* not accepted).
|
||||
* * An HTTP GET on the URL must yield a [google.protobuf.Type][]
|
||||
* value in binary format, or produce an error.
|
||||
* * Applications are allowed to cache lookup results based on the
|
||||
* URL, or have them precompiled into a binary to avoid any
|
||||
* lookup. Therefore, binary compatibility needs to be preserved
|
||||
* on changes to types. (Use versioned type names to manage
|
||||
* breaking changes.)
|
||||
* Schemes other than `http`, `https` (or the empty scheme) might be
|
||||
* used with implementation specific semantics.
|
||||
* </pre>
|
||||
*
|
||||
* <code>string type_url = 1;</code>
|
||||
*/
|
||||
public Builder setTypeUrl(
|
||||
java.lang.String value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
|
||||
typeUrl_ = value;
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* A URL/resource name whose content describes the type of the
|
||||
* serialized protocol buffer message.
|
||||
* For URLs which use the scheme `http`, `https`, or no scheme, the
|
||||
* following restrictions and interpretations apply:
|
||||
* * If no scheme is provided, `https` is assumed.
|
||||
* * The last segment of the URL's path must represent the fully
|
||||
* qualified name of the type (as in `path/google.protobuf.Duration`).
|
||||
* The name should be in a canonical form (e.g., leading "." is
|
||||
* not accepted).
|
||||
* * An HTTP GET on the URL must yield a [google.protobuf.Type][]
|
||||
* value in binary format, or produce an error.
|
||||
* * Applications are allowed to cache lookup results based on the
|
||||
* URL, or have them precompiled into a binary to avoid any
|
||||
* lookup. Therefore, binary compatibility needs to be preserved
|
||||
* on changes to types. (Use versioned type names to manage
|
||||
* breaking changes.)
|
||||
* Schemes other than `http`, `https` (or the empty scheme) might be
|
||||
* used with implementation specific semantics.
|
||||
* </pre>
|
||||
*
|
||||
* <code>string type_url = 1;</code>
|
||||
*/
|
||||
public Builder clearTypeUrl() {
|
||||
|
||||
typeUrl_ = getDefaultInstance().getTypeUrl();
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* A URL/resource name whose content describes the type of the
|
||||
* serialized protocol buffer message.
|
||||
* For URLs which use the scheme `http`, `https`, or no scheme, the
|
||||
* following restrictions and interpretations apply:
|
||||
* * If no scheme is provided, `https` is assumed.
|
||||
* * The last segment of the URL's path must represent the fully
|
||||
* qualified name of the type (as in `path/google.protobuf.Duration`).
|
||||
* The name should be in a canonical form (e.g., leading "." is
|
||||
* not accepted).
|
||||
* * An HTTP GET on the URL must yield a [google.protobuf.Type][]
|
||||
* value in binary format, or produce an error.
|
||||
* * Applications are allowed to cache lookup results based on the
|
||||
* URL, or have them precompiled into a binary to avoid any
|
||||
* lookup. Therefore, binary compatibility needs to be preserved
|
||||
* on changes to types. (Use versioned type names to manage
|
||||
* breaking changes.)
|
||||
* Schemes other than `http`, `https` (or the empty scheme) might be
|
||||
* used with implementation specific semantics.
|
||||
* </pre>
|
||||
*
|
||||
* <code>string type_url = 1;</code>
|
||||
*/
|
||||
public Builder setTypeUrlBytes(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
checkByteStringIsUtf8(value);
|
||||
|
||||
typeUrl_ = value;
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
|
||||
/**
|
||||
* <pre>
|
||||
* Must be a valid serialized protocol buffer of the above specified type.
|
||||
* </pre>
|
||||
*
|
||||
* <code>bytes value = 2;</code>
|
||||
*/
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() {
|
||||
return value_;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* Must be a valid serialized protocol buffer of the above specified type.
|
||||
* </pre>
|
||||
*
|
||||
* <code>bytes value = 2;</code>
|
||||
*/
|
||||
public Builder setValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
|
||||
value_ = value;
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* Must be a valid serialized protocol buffer of the above specified type.
|
||||
* </pre>
|
||||
*
|
||||
* <code>bytes value = 2;</code>
|
||||
*/
|
||||
public Builder clearValue() {
|
||||
|
||||
value_ = getDefaultInstance().getValue();
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
public final Builder setUnknownFields(
|
||||
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
|
||||
return this;
|
||||
}
|
||||
|
||||
public final Builder mergeUnknownFields(
|
||||
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:google.protobuf.Any)
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:google.protobuf.Any)
|
||||
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Any DEFAULT_INSTANCE;
|
||||
static {
|
||||
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Any();
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any getDefaultInstance() {
|
||||
return DEFAULT_INSTANCE;
|
||||
}
|
||||
|
||||
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Any>
|
||||
PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Any>() {
|
||||
public Any parsePartialFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
|
||||
return new Any(input, extensionRegistry);
|
||||
}
|
||||
};
|
||||
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Any> parser() {
|
||||
return PARSER;
|
||||
}
|
||||
|
||||
@java.lang.Override
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Any> getParserForType() {
|
||||
return PARSER;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Any getDefaultInstanceForType() {
|
||||
return DEFAULT_INSTANCE;
|
||||
}
|
||||
|
||||
}
|
||||
|
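[Editor's note: the Any javadoc above walks through pack/unpack, and the generated class deleted here exposes only the raw builder methods. Below is a minimal sketch, not part of this commit, that round-trips a payload through the relocated Any by hand using only methods visible in this file; the class name AnyRoundTrip and the choice of BoolValue as payload are invented for illustration.]

import org.apache.hadoop.hbase.shaded.com.google.protobuf.Any;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;

public class AnyRoundTrip {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // "Pack" by hand: the type URL is 'type.googleapis.com/' plus the full
    // type name, the value field carries the serialized payload.
    BoolValue payload = BoolValue.newBuilder().setValue(true).build();
    Any any = Any.newBuilder()
        .setTypeUrl("type.googleapis.com/google.protobuf.BoolValue")
        .setValue(payload.toByteString())
        .build();

    // "Unpack": per the javadoc, only the type name after the last '/'
    // matters ("foo.bar.com/x/y.z" yields type name "y.z").
    String typeUrl = any.getTypeUrl();
    String typeName = typeUrl.substring(typeUrl.lastIndexOf('/') + 1);
    if (typeName.equals(BoolValue.getDescriptor().getFullName())) {
      BoolValue back = BoolValue.parseFrom(any.getValue());
      System.out.println(back.getValue()); // true
    }
  }
}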
@@ -1,70 +0,0 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/any.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface AnyOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.Any)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * A URL/resource name whose content describes the type of the
   * serialized protocol buffer message.
   * For URLs which use the scheme `http`, `https`, or no scheme, the
   * following restrictions and interpretations apply:
   * * If no scheme is provided, `https` is assumed.
   * * The last segment of the URL's path must represent the fully
   *   qualified name of the type (as in `path/google.protobuf.Duration`).
   *   The name should be in a canonical form (e.g., leading "." is
   *   not accepted).
   * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
   *   value in binary format, or produce an error.
   * * Applications are allowed to cache lookup results based on the
   *   URL, or have them precompiled into a binary to avoid any
   *   lookup. Therefore, binary compatibility needs to be preserved
   *   on changes to types. (Use versioned type names to manage
   *   breaking changes.)
   * Schemes other than `http`, `https` (or the empty scheme) might be
   * used with implementation specific semantics.
   * </pre>
   *
   * <code>string type_url = 1;</code>
   */
  java.lang.String getTypeUrl();
  /**
   * <pre>
   * A URL/resource name whose content describes the type of the
   * serialized protocol buffer message.
   * For URLs which use the scheme `http`, `https`, or no scheme, the
   * following restrictions and interpretations apply:
   * * If no scheme is provided, `https` is assumed.
   * * The last segment of the URL's path must represent the fully
   *   qualified name of the type (as in `path/google.protobuf.Duration`).
   *   The name should be in a canonical form (e.g., leading "." is
   *   not accepted).
   * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
   *   value in binary format, or produce an error.
   * * Applications are allowed to cache lookup results based on the
   *   URL, or have them precompiled into a binary to avoid any
   *   lookup. Therefore, binary compatibility needs to be preserved
   *   on changes to types. (Use versioned type names to manage
   *   breaking changes.)
   * Schemes other than `http`, `https` (or the empty scheme) might be
   * used with implementation specific semantics.
   * </pre>
   *
   * <code>string type_url = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getTypeUrlBytes();

  /**
   * <pre>
   * Must be a valid serialized protocol buffer of the above specified type.
   * </pre>
   *
   * <code>bytes value = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue();
}
@@ -1,59 +0,0 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/any.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public final class AnyProto {
  private AnyProto() {}
  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
  }
  static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_google_protobuf_Any_descriptor;
  static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_protobuf_Any_fieldAccessorTable;

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\031google/protobuf/any.proto\022\017google.prot" +
      "obuf\"&\n\003Any\022\020\n\010type_url\030\001 \001(\t\022\r\n\005value\030\002" +
      " \001(\014Bo\n\023com.google.protobufB\010AnyProtoP\001Z" +
      "%github.com/golang/protobuf/ptypes/any\242\002" +
      "\003GPB\252\002\036Google.Protobuf.WellKnownTypesb\006p" +
      "roto3"
    };
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.    InternalDescriptorAssigner() {
          public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
              org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            return null;
          }
        };
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
    internal_static_google_protobuf_Any_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_google_protobuf_Any_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_google_protobuf_Any_descriptor,
        new java.lang.String[] { "TypeUrl", "Value", });
  }

  // @@protoc_insertion_point(outer_class_scope)
}
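[Editor's note: the static block above rebuilds the file descriptor from the serialized descriptorData strings. As a hedged illustration, not part of this commit, the sketch below (class name InspectAnyDescriptor is invented) shows what that wiring yields at runtime, using only the accessors defined in this file plus the standard Descriptors API.]

import org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyProto;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors;

public class InspectAnyDescriptor {
  public static void main(String[] args) {
    // descriptorData parsed back into a FileDescriptor by the static block.
    Descriptors.FileDescriptor file = AnyProto.getDescriptor();
    Descriptors.Descriptor any = file.getMessageTypes().get(0);
    System.out.println(any.getFullName()); // google.protobuf.Any
    for (Descriptors.FieldDescriptor f : any.getFields()) {
      System.out.println(f.getNumber() + ": " + f.getName()); // 1: type_url, 2: value
    }
  }
}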
File diff suppressed because it is too large
@@ -1,258 +0,0 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/api.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface ApiOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.Api)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * The fully qualified name of this api, including package name
   * followed by the api's simple name.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  java.lang.String getName();
  /**
   * <pre>
   * The fully qualified name of this api, including package name
   * followed by the api's simple name.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getNameBytes();

  /**
   * <pre>
   * The methods of this api, in unspecified order.
   * </pre>
   *
   * <code>repeated .google.protobuf.Method methods = 2;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Method>
      getMethodsList();
  /**
   * <pre>
   * The methods of this api, in unspecified order.
   * </pre>
   *
   * <code>repeated .google.protobuf.Method methods = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.Method getMethods(int index);
  /**
   * <pre>
   * The methods of this api, in unspecified order.
   * </pre>
   *
   * <code>repeated .google.protobuf.Method methods = 2;</code>
   */
  int getMethodsCount();
  /**
   * <pre>
   * The methods of this api, in unspecified order.
   * </pre>
   *
   * <code>repeated .google.protobuf.Method methods = 2;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.MethodOrBuilder>
      getMethodsOrBuilderList();
  /**
   * <pre>
   * The methods of this api, in unspecified order.
   * </pre>
   *
   * <code>repeated .google.protobuf.Method methods = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.MethodOrBuilder getMethodsOrBuilder(
      int index);

  /**
   * <pre>
   * Any metadata attached to the API.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Option>
      getOptionsList();
  /**
   * <pre>
   * Any metadata attached to the API.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.Option getOptions(int index);
  /**
   * <pre>
   * Any metadata attached to the API.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  int getOptionsCount();
  /**
   * <pre>
   * Any metadata attached to the API.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder>
      getOptionsOrBuilderList();
  /**
   * <pre>
   * Any metadata attached to the API.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder getOptionsOrBuilder(
      int index);

  /**
   * <pre>
   * A version string for this api. If specified, must have the form
   * `major-version.minor-version`, as in `1.10`. If the minor version
   * is omitted, it defaults to zero. If the entire version field is
   * empty, the major version is derived from the package name, as
   * outlined below. If the field is not empty, the version in the
   * package name will be verified to be consistent with what is
   * provided here.
   * The versioning schema uses [semantic
   * versioning](http://semver.org) where the major version number
   * indicates a breaking change and the minor version an additive,
   * non-breaking change. Both version numbers are signals to users
   * what to expect from different versions, and should be carefully
   * chosen based on the product plan.
   * The major version is also reflected in the package name of the
   * API, which must end in `v<major-version>`, as in
   * `google.feature.v1`. For major versions 0 and 1, the suffix can
   * be omitted. Zero major versions must only be used for
   * experimental, none-GA apis.
   * </pre>
   *
   * <code>string version = 4;</code>
   */
  java.lang.String getVersion();
  /**
   * <pre>
   * A version string for this api. If specified, must have the form
   * `major-version.minor-version`, as in `1.10`. If the minor version
   * is omitted, it defaults to zero. If the entire version field is
   * empty, the major version is derived from the package name, as
   * outlined below. If the field is not empty, the version in the
   * package name will be verified to be consistent with what is
   * provided here.
   * The versioning schema uses [semantic
   * versioning](http://semver.org) where the major version number
   * indicates a breaking change and the minor version an additive,
   * non-breaking change. Both version numbers are signals to users
   * what to expect from different versions, and should be carefully
   * chosen based on the product plan.
   * The major version is also reflected in the package name of the
   * API, which must end in `v<major-version>`, as in
   * `google.feature.v1`. For major versions 0 and 1, the suffix can
   * be omitted. Zero major versions must only be used for
   * experimental, none-GA apis.
   * </pre>
   *
   * <code>string version = 4;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getVersionBytes();

  /**
   * <pre>
   * Source context for the protocol buffer service represented by this
   * message.
   * </pre>
   *
   * <code>.google.protobuf.SourceContext source_context = 5;</code>
   */
  boolean hasSourceContext();
  /**
   * <pre>
   * Source context for the protocol buffer service represented by this
   * message.
   * </pre>
   *
   * <code>.google.protobuf.SourceContext source_context = 5;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext getSourceContext();
  /**
   * <pre>
   * Source context for the protocol buffer service represented by this
   * message.
   * </pre>
   *
   * <code>.google.protobuf.SourceContext source_context = 5;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder getSourceContextOrBuilder();

  /**
   * <pre>
   * Included APIs. See [Mixin][].
   * </pre>
   *
   * <code>repeated .google.protobuf.Mixin mixins = 6;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin>
      getMixinsList();
  /**
   * <pre>
   * Included APIs. See [Mixin][].
   * </pre>
   *
   * <code>repeated .google.protobuf.Mixin mixins = 6;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin getMixins(int index);
  /**
   * <pre>
   * Included APIs. See [Mixin][].
   * </pre>
   *
   * <code>repeated .google.protobuf.Mixin mixins = 6;</code>
   */
  int getMixinsCount();
  /**
   * <pre>
   * Included APIs. See [Mixin][].
   * </pre>
   *
   * <code>repeated .google.protobuf.Mixin mixins = 6;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.MixinOrBuilder>
      getMixinsOrBuilderList();
  /**
   * <pre>
   * Included APIs. See [Mixin][].
   * </pre>
   *
   * <code>repeated .google.protobuf.Mixin mixins = 6;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.MixinOrBuilder getMixinsOrBuilder(
      int index);

  /**
   * <pre>
   * The source syntax of the service.
   * </pre>
   *
   * <code>.google.protobuf.Syntax syntax = 7;</code>
   */
  int getSyntaxValue();
  /**
   * <pre>
   * The source syntax of the service.
   * </pre>
   *
   * <code>.google.protobuf.Syntax syntax = 7;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax getSyntax();
}
@@ -1,98 +0,0 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/api.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public final class ApiProto {
  private ApiProto() {}
  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
  }
  static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_google_protobuf_Api_descriptor;
  static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_protobuf_Api_fieldAccessorTable;
  static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_google_protobuf_Method_descriptor;
  static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_protobuf_Method_fieldAccessorTable;
  static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_google_protobuf_Mixin_descriptor;
  static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_protobuf_Mixin_fieldAccessorTable;

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\031google/protobuf/api.proto\022\017google.prot" +
      "obuf\032$google/protobuf/source_context.pro" +
      "to\032\032google/protobuf/type.proto\"\201\002\n\003Api\022\014" +
      "\n\004name\030\001 \001(\t\022(\n\007methods\030\002 \003(\0132\027.google.p" +
      "rotobuf.Method\022(\n\007options\030\003 \003(\0132\027.google" +
      ".protobuf.Option\022\017\n\007version\030\004 \001(\t\0226\n\016sou" +
      "rce_context\030\005 \001(\0132\036.google.protobuf.Sour" +
      "ceContext\022&\n\006mixins\030\006 \003(\0132\026.google.proto" +
      "buf.Mixin\022\'\n\006syntax\030\007 \001(\0162\027.google.proto" +
      "buf.Syntax\"\325\001\n\006Method\022\014\n\004name\030\001 \001(\t\022\030\n\020r",
      "equest_type_url\030\002 \001(\t\022\031\n\021request_streami" +
      "ng\030\003 \001(\010\022\031\n\021response_type_url\030\004 \001(\t\022\032\n\022r" +
      "esponse_streaming\030\005 \001(\010\022(\n\007options\030\006 \003(\013" +
      "2\027.google.protobuf.Option\022\'\n\006syntax\030\007 \001(" +
      "\0162\027.google.protobuf.Syntax\"#\n\005Mixin\022\014\n\004n" +
      "ame\030\001 \001(\t\022\014\n\004root\030\002 \001(\tBu\n\023com.google.pr" +
      "otobufB\010ApiProtoP\001Z+google.golang.org/ge" +
      "nproto/protobuf/api;api\242\002\003GPB\252\002\036Google.P" +
      "rotobuf.WellKnownTypesb\006proto3"
    };
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.    InternalDescriptorAssigner() {
          public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
              org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            return null;
          }
        };
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextProto.getDescriptor(),
          org.apache.hadoop.hbase.shaded.com.google.protobuf.TypeProto.getDescriptor(),
        }, assigner);
    internal_static_google_protobuf_Api_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_google_protobuf_Api_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_google_protobuf_Api_descriptor,
        new java.lang.String[] { "Name", "Methods", "Options", "Version", "SourceContext", "Mixins", "Syntax", });
    internal_static_google_protobuf_Method_descriptor =
      getDescriptor().getMessageTypes().get(1);
    internal_static_google_protobuf_Method_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_google_protobuf_Method_descriptor,
        new java.lang.String[] { "Name", "RequestTypeUrl", "RequestStreaming", "ResponseTypeUrl", "ResponseStreaming", "Options", "Syntax", });
    internal_static_google_protobuf_Mixin_descriptor =
      getDescriptor().getMessageTypes().get(2);
    internal_static_google_protobuf_Mixin_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_google_protobuf_Mixin_descriptor,
        new java.lang.String[] { "Name", "Root", });
    org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextProto.getDescriptor();
    org.apache.hadoop.hbase.shaded.com.google.protobuf.TypeProto.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}
@@ -1,51 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

/**
 * <p>Abstract interface for a blocking RPC channel.  {@code BlockingRpcChannel}
 * is the blocking equivalent to {@link RpcChannel}.
 *
 * @author kenton@google.com Kenton Varda
 * @author cpovirk@google.com Chris Povirk
 */
public interface BlockingRpcChannel {
  /**
   * Call the given method of the remote service and blocks until it returns.
   * {@code callBlockingMethod()} is the blocking equivalent to
   * {@link RpcChannel#callMethod}.
   */
  Message callBlockingMethod(
      Descriptors.MethodDescriptor method,
      RpcController controller,
      Message request,
      Message responsePrototype) throws ServiceException;
}
@@ -1,64 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

/**
 * Blocking equivalent to {@link Service}.
 *
 * @author kenton@google.com Kenton Varda
 * @author cpovirk@google.com Chris Povirk
 */
public interface BlockingService {
  /**
   * Equivalent to {@link Service#getDescriptorForType}.
   */
  Descriptors.ServiceDescriptor getDescriptorForType();

  /**
   * Equivalent to {@link Service#callMethod}, except that
   * {@code callBlockingMethod()} returns the result of the RPC or throws a
   * {@link ServiceException} if there is a failure, rather than passing the
   * information to a callback.
   */
  Message callBlockingMethod(Descriptors.MethodDescriptor method,
                             RpcController controller,
                             Message request) throws ServiceException;

  /**
   * Equivalent to {@link Service#getRequestPrototype}.
   */
  Message getRequestPrototype(Descriptors.MethodDescriptor method);

  /**
   * Equivalent to {@link Service#getResponsePrototype}.
   */
  Message getResponsePrototype(Descriptors.MethodDescriptor method);
}
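[Editor's note: BlockingRpcChannel and BlockingService are the two halves of the blocking RPC contract described in the javadoc above, and are easiest to see together. Below is a minimal sketch, not part of this commit, of an in-process channel that dispatches each blocking call straight to a local BlockingService; the class name LocalBlockingRpcChannel is invented for the example, and a real channel would serialize the request and block on a remote server instead.]

import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

/** In-process channel: forwards each blocking call to a local service. */
public class LocalBlockingRpcChannel implements BlockingRpcChannel {
  private final BlockingService service;

  public LocalBlockingRpcChannel(BlockingService service) {
    this.service = service;
  }

  @Override
  public Message callBlockingMethod(
      Descriptors.MethodDescriptor method,
      RpcController controller,
      Message request,
      Message responsePrototype) throws ServiceException {
    // The "remote" service here is just a local object, so the call
    // returns the response directly rather than via a callback.
    return service.callBlockingMethod(method, controller, request);
  }
}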
@@ -1,452 +0,0 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/wrappers.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

/**
 * <pre>
 * Wrapper message for `bool`.
 * The JSON representation for `BoolValue` is JSON `true` and `false`.
 * </pre>
 *
 * Protobuf type {@code google.protobuf.BoolValue}
 */
public final class BoolValue extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.protobuf.BoolValue)
    BoolValueOrBuilder {
  // Use BoolValue.newBuilder() to construct.
  private BoolValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private BoolValue() {
    value_ = false;
  }

  @java.lang.Override
  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  private BoolValue(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 8: {

            value_ = input.readBool();
            break;
          }
        }
      }
    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_descriptor;
  }

  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.Builder.class);
  }

  public static final int VALUE_FIELD_NUMBER = 1;
  private boolean value_;
  /**
   * <pre>
   * The bool value.
   * </pre>
   *
   * <code>bool value = 1;</code>
   */
  public boolean getValue() {
    return value_;
  }

  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (value_ != false) {
      output.writeBool(1, value_);
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (value_ != false) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
        .computeBoolSize(1, value_);
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue) obj;

    boolean result = true;
    result = result && (getValue()
        == other.getValue());
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + VALUE_FIELD_NUMBER;
    hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean(
        getValue());
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom(byte[] data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom(
      byte[] data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Wrapper message for `bool`.
   * The JSON representation for `BoolValue` is JSON `true` and `false`.
   * </pre>
   *
   * Protobuf type {@code google.protobuf.BoolValue}
   */
  public static final class Builder extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.protobuf.BoolValue)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValueOrBuilder {
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
|
||||
maybeForceBuilderInitialization();
|
||||
}
|
||||
private void maybeForceBuilderInitialization() {
|
||||
if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.alwaysUseFieldBuilders) {
|
||||
}
|
||||
}
|
||||
public Builder clear() {
|
||||
super.clear();
|
||||
value_ = false;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_descriptor;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.getDefaultInstance();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue build() {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue result = buildPartial();
|
||||
if (!result.isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue buildPartial() {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue(this);
|
||||
result.value_ = value_;
|
||||
onBuilt();
|
||||
return result;
|
||||
}
|
||||
|
||||
public Builder clone() {
|
||||
return (Builder) super.clone();
|
||||
}
|
||||
public Builder setField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
|
||||
Object value) {
|
||||
return (Builder) super.setField(field, value);
|
||||
}
|
||||
public Builder clearField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
|
||||
return (Builder) super.clearField(field);
|
||||
}
|
||||
public Builder clearOneof(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
|
||||
return (Builder) super.clearOneof(oneof);
|
||||
}
|
||||
public Builder setRepeatedField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
|
||||
int index, Object value) {
|
||||
return (Builder) super.setRepeatedField(field, index, value);
|
||||
}
|
||||
public Builder addRepeatedField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
|
||||
Object value) {
|
||||
return (Builder) super.addRepeatedField(field, value);
|
||||
}
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue other) {
|
||||
if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.getDefaultInstance()) return this;
|
||||
if (other.getValue() != false) {
|
||||
setValue(other.getValue());
|
||||
}
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
|
||||
public final boolean isInitialized() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parsedMessage = null;
|
||||
try {
|
||||
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
|
||||
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
|
||||
parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue) e.getUnfinishedMessage();
|
||||
throw e.unwrapIOException();
|
||||
} finally {
|
||||
if (parsedMessage != null) {
|
||||
mergeFrom(parsedMessage);
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
private boolean value_ ;
|
||||
/**
|
||||
* <pre>
|
||||
* The bool value.
|
||||
* </pre>
|
||||
*
|
||||
* <code>bool value = 1;</code>
|
||||
*/
|
||||
public boolean getValue() {
|
||||
return value_;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* The bool value.
|
||||
* </pre>
|
||||
*
|
||||
* <code>bool value = 1;</code>
|
||||
*/
|
||||
public Builder setValue(boolean value) {
|
||||
|
||||
value_ = value;
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* The bool value.
|
||||
* </pre>
|
||||
*
|
||||
* <code>bool value = 1;</code>
|
||||
*/
|
||||
public Builder clearValue() {
|
||||
|
||||
value_ = false;
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
public final Builder setUnknownFields(
|
||||
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
|
||||
return this;
|
||||
}
|
||||
|
||||
public final Builder mergeUnknownFields(
|
||||
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:google.protobuf.BoolValue)
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:google.protobuf.BoolValue)
|
||||
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue DEFAULT_INSTANCE;
|
||||
static {
|
||||
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue();
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue getDefaultInstance() {
|
||||
return DEFAULT_INSTANCE;
|
||||
}
|
||||
|
||||
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BoolValue>
|
||||
PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<BoolValue>() {
|
||||
public BoolValue parsePartialFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
|
||||
return new BoolValue(input, extensionRegistry);
|
||||
}
|
||||
};
|
||||
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BoolValue> parser() {
|
||||
return PARSER;
|
||||
}
|
||||
|
||||
@java.lang.Override
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BoolValue> getParserForType() {
|
||||
return PARSER;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue getDefaultInstanceForType() {
|
||||
return DEFAULT_INSTANCE;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
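The removed file is stock protoc output, so its public surface is the standard wrapper-message API summarized above (newBuilder/setValue/build, the parseFrom overloads, value-based equals). A minimal round-trip sketch against the shaded classes; the harness class itself is illustrative only and not part of the deleted sources:

import org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue;

public class BoolValueRoundTrip {
  public static void main(String[] args) throws Exception {
    // Build via the generated Builder, serialize, then parse back.
    BoolValue original = BoolValue.newBuilder().setValue(true).build();
    byte[] wire = original.toByteArray();
    BoolValue parsed = BoolValue.parseFrom(wire);
    // The generated equals() compares the single `value` field.
    System.out.println(parsed.getValue() && parsed.equals(original)); // true
  }
}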
@@ -1,18 +0,0 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/wrappers.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface BoolValueOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.BoolValue)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * The bool value.
   * </pre>
   *
   * <code>bool value = 1;</code>
   */
  boolean getValue();
}
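Both the generated message and its Builder implement this OrBuilder interface, so code can be written once against either form. A tiny illustrative helper (hypothetical, not part of the deleted sources):

  // Accepts a BoolValue or a BoolValue.Builder alike.
  static boolean isTrue(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValueOrBuilder v) {
    return v.getValue();
  }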
@@ -1,272 +0,0 @@
[deleted: BooleanArrayList.java, package org.apache.hadoop.hbase.shaded.com.google.protobuf — an implementation of Internal.BooleanList backed by a primitive boolean[] (unboxed add/get/set, 1.5x array growth, a specialized addAll that avoids boxing when the argument is another BooleanArrayList); 272-line copied protobuf runtime source, body suppressed because it is too large; its growth step is sketched below]
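The one non-obvious detail in the deleted list class is its insertion/growth step. The core of the removed addBoolean(int, boolean) is reproduced here as a standalone sketch (field names as in the deleted file; bounds checks and modCount bookkeeping omitted):

final class GrowableBooleanArray {
  private boolean[] array = new boolean[10];
  private int size;

  void insert(int index, boolean element) {
    if (size < array.length) {
      // Room available: shift the tail right by one to make a slot.
      System.arraycopy(array, index, array, index + 1, size - index);
    } else {
      // Full: grow to 1.5x (+1, so even a zero-length array grows).
      int length = ((size * 3) / 2) + 1;
      boolean[] newArray = new boolean[length];
      System.arraycopy(array, 0, newArray, 0, index);                    // prefix
      System.arraycopy(array, index, newArray, index + 1, size - index); // shifted tail
      array = newArray;
    }
    array[index] = element;
    size++;
  }
}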
@@ -1,185 +0,0 @@
[deleted: ByteBufferWriter.java, package org.apache.hadoop.hbase.shaded.com.google.protobuf — utility for writing ByteBuffers to OutputStreams: writes array-backed buffers directly, tries a reflective zero-copy path through FileOutputStream's private channel field, and otherwise copies through a thread-local SoftReference byte[] cache (1 KB minimum / 16 KB maximum cached buffer, reallocating when the cached buffer is under half the requested size); 185-line copied protobuf runtime source, body suppressed because it is too large; the copy path is sketched below]
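A simplified sketch of the deleted write(ByteBuffer, OutputStream) copy path, without the zero-copy channel hack or the cached thread-local buffer (the fixed 8 KB scratch size is an assumption for illustration). As in the original, the buffer's position is restored before returning:

final class SimpleByteBufferWriter {
  static void write(java.nio.ByteBuffer buffer, java.io.OutputStream output)
      throws java.io.IOException {
    final int initialPos = buffer.position();
    try {
      if (buffer.hasArray()) {
        // Array-backed: hand the backing array straight to the stream.
        output.write(buffer.array(), buffer.arrayOffset() + buffer.position(),
            buffer.remaining());
      } else {
        // Direct/other buffers: drain through a scratch array.
        byte[] scratch = new byte[Math.min(buffer.remaining(), 8 * 1024)];
        while (buffer.hasRemaining()) {
          int length = Math.min(buffer.remaining(), scratch.length);
          buffer.get(scratch, 0, length);
          output.write(scratch, 0, length);
        }
      }
    } finally {
      // Restore the initial position.
      buffer.position(initialPos);
    }
  }
}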
@@ -1,81 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * An input for raw bytes. This is similar to an InputStream but it is offset addressable. All the
 * read APIs are relative.
 */
@ExperimentalApi
public abstract class ByteInput {

  /**
   * Reads a single byte from the given offset.
   * @param offset The offset from where the byte is to be read
   * @return The byte of data at the given offset
   */
  public abstract byte read(int offset);

  /**
   * Reads bytes of data from the given offset into an array of bytes.
   * @param offset The src offset within this ByteInput from where data is to be read.
   * @param b Destination byte array to read data into.
   * @return The number of bytes read from ByteInput
   */
  public int read(int offset, byte[] b) throws IOException {
    return read(offset, b, 0, b.length);
  }

  /**
   * Reads up to <code>len</code> bytes of data from the given offset into an array of bytes.
   * @param offset The src offset within this ByteInput from where data is to be read.
   * @param out Destination byte array to read data into.
   * @param outOffset Offset within the out byte[] where data is to be read into.
   * @param len The number of bytes to read.
   * @return The number of bytes read from ByteInput
   */
  public abstract int read(int offset, byte[] out, int outOffset, int len);

  /**
   * Reads bytes of data from the given offset into the given {@link ByteBuffer}.
   * @param offset The src offset within this ByteInput from where data is to be read.
   * @param out Destination {@link ByteBuffer} to read data into.
   * @return The number of bytes read from ByteInput
   */
  public abstract int read(int offset, ByteBuffer out);

  /**
   * @return Total number of bytes in this ByteInput.
   */
  public abstract int size();
}
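A minimal sketch of a ByteInput backed by a plain byte[], implementing only the abstract methods declared above. Illustrative only: the deleted sources ship no class under this name.

import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput;

final class ArrayByteInput extends ByteInput {
  private final byte[] bytes;

  ArrayByteInput(byte[] bytes) { this.bytes = bytes; }

  @Override public byte read(int offset) { return bytes[offset]; }

  @Override public int read(int offset, byte[] out, int outOffset, int len) {
    System.arraycopy(bytes, offset, out, outOffset, len);
    return len;
  }

  @Override public int read(int offset, java.nio.ByteBuffer out) {
    // Copy as much as the destination can take, relative to our offset.
    int len = Math.min(out.remaining(), bytes.length - offset);
    out.put(bytes, offset, len);
    return len;
  }

  @Override public int size() { return bytes.length; }
}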
@@ -1,251 +0,0 @@
[deleted: ByteInputByteString.java, package org.apache.hadoop.hbase.shaded.com.google.protobuf — a ByteString.LeafByteString that wraps a ByteInput with an offset/length window: zero-copy substring(), an InputStream view with mark/reset, an aliasing CodedInputStream view, UTF-8 validation and hashing delegated to the underlying ByteInput, and serialization rewritten via ByteString.wrap(toByteArray()); 251-line source, body suppressed because it is too large]
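Whatever leaf implementation backs it, a ByteString exposes the same stream views the deleted class implements. A small illustrative example using the public copyFrom factory (the harness class is hypothetical):

import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import java.io.InputStream;

public class ByteStringView {
  public static void main(String[] args) throws Exception {
    ByteString bs = ByteString.copyFrom(new byte[] {1, 2, 3});
    // mark/reset are supported, as in the deleted newInput() implementation.
    InputStream in = bs.newInput();
    System.out.println(in.read()); // 1
  }
}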
@@ -1,116 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * An output target for raw bytes. This interface provides semantics that support two types of
 * writing:
 *
 * <p><b>Traditional write operations:</b>
 * (as defined by {@link java.io.OutputStream}) where the target method is responsible for either
 * copying the data or completing the write before returning from the method call.
 *
 * <p><b>Lazy write operations:</b> where the caller guarantees that it will never modify the
 * provided buffer and it can therefore be considered immutable. The target method is free to
 * maintain a reference to the buffer beyond the scope of the method call (e.g. until the write
 * operation completes).
 */
@ExperimentalApi
public abstract class ByteOutput {
  /**
   * Writes a single byte.
   *
   * @param value the byte to be written
   * @throws IOException thrown if an error occurred while writing
   */
  public abstract void write(byte value) throws IOException;

  /**
   * Writes a sequence of bytes. The {@link ByteOutput} must copy {@code value} if it will
   * not be processed prior to the return of this method call, since {@code value} may be
   * reused/altered by the caller.
   *
   * <p>NOTE: This method <strong>MUST NOT</strong> modify the {@code value}. Doing so is a
   * programming error and will lead to data corruption which will be difficult to debug.
   *
   * @param value the bytes to be written
   * @param offset the offset of the start of the writable range
   * @param length the number of bytes to write starting from {@code offset}
   * @throws IOException thrown if an error occurred while writing
   */
  public abstract void write(byte[] value, int offset, int length) throws IOException;

  /**
   * Writes a sequence of bytes. The {@link ByteOutput} is free to retain a reference to the value
   * beyond the scope of this method call (e.g. write later) since it is considered immutable and is
   * guaranteed not to change by the caller.
   *
   * <p>NOTE: This method <strong>MUST NOT</strong> modify the {@code value}. Doing so is a
   * programming error and will lead to data corruption which will be difficult to debug.
   *
   * @param value the bytes to be written
   * @param offset the offset of the start of the writable range
   * @param length the number of bytes to write starting from {@code offset}
   * @throws IOException thrown if an error occurred while writing
   */
  public abstract void writeLazy(byte[] value, int offset, int length) throws IOException;

  /**
   * Writes a sequence of bytes. The {@link ByteOutput} must copy {@code value} if it will
   * not be processed prior to the return of this method call, since {@code value} may be
   * reused/altered by the caller.
   *
   * <p>NOTE: This method <strong>MUST NOT</strong> modify the {@code value}. Doing so is a
   * programming error and will lead to data corruption which will be difficult to debug.
   *
   * @param value the bytes to be written. Upon returning from this call, the {@code position} of
   * this buffer will be set to the {@code limit}
   * @throws IOException thrown if an error occurred while writing
   */
  public abstract void write(ByteBuffer value) throws IOException;

  /**
   * Writes a sequence of bytes. The {@link ByteOutput} is free to retain a reference to the value
   * beyond the scope of this method call (e.g. write later) since it is considered immutable and is
   * guaranteed not to change by the caller.
   *
   * <p>NOTE: This method <strong>MUST NOT</strong> modify the {@code value}. Doing so is a
   * programming error and will lead to data corruption which will be difficult to debug.
   *
   * @param value the bytes to be written. Upon returning from this call, the {@code position} of
   * this buffer will be set to the {@code limit}
   * @throws IOException thrown if an error occurred while writing
   */
  public abstract void writeLazy(ByteBuffer value) throws IOException;
}
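A minimal sketch of a ByteOutput that always copies into an in-memory buffer. Because it copies eagerly, write() and writeLazy() can share one code path; a zero-copy implementation would instead retain the writeLazy() arguments until the write completes. Illustrative only, not part of the deleted sources:

import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteOutput;

final class CopyingByteOutput extends ByteOutput {
  private final java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();

  @Override public void write(byte value) { out.write(value); }

  @Override public void write(byte[] value, int offset, int length) {
    out.write(value, offset, length);
  }

  @Override public void writeLazy(byte[] value, int offset, int length) {
    write(value, offset, length); // safe: we copy, so nothing is retained
  }

  @Override public void write(java.nio.ByteBuffer value) {
    byte[] copy = new byte[value.remaining()];
    value.get(copy); // advances position to limit, per the contract above
    out.write(copy, 0, copy.length);
  }

  @Override public void writeLazy(java.nio.ByteBuffer value) { write(value); }

  byte[] toByteArray() { return out.toByteArray(); }
}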
File diff suppressed because it is too large
@@ -1,454 +0,0 @@
[deleted: BytesValue.java, package org.apache.hadoop.hbase.shaded.com.google.protobuf — the generated google.protobuf.BytesValue wrapper message (single `bytes value = 1;` field), structured exactly like BoolValue above: parsing constructor, writeTo()/getSerializedSize(), value-based equals()/hashCode(), the parseFrom()/parseDelimitedFrom() overloads, and a nested Builder; 454-line generated file, body suppressed because it is too large]
|
||||
return (Builder) super.clearOneof(oneof);
|
||||
}
|
||||
public Builder setRepeatedField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
|
||||
int index, Object value) {
|
||||
return (Builder) super.setRepeatedField(field, index, value);
|
||||
}
|
||||
public Builder addRepeatedField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
|
||||
Object value) {
|
||||
return (Builder) super.addRepeatedField(field, value);
|
||||
}
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue other) {
|
||||
if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue.getDefaultInstance()) return this;
|
||||
if (other.getValue() != org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY) {
|
||||
setValue(other.getValue());
|
||||
}
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
|
||||
public final boolean isInitialized() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parsedMessage = null;
|
||||
try {
|
||||
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
|
||||
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
|
||||
parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue) e.getUnfinishedMessage();
|
||||
throw e.unwrapIOException();
|
||||
} finally {
|
||||
if (parsedMessage != null) {
|
||||
mergeFrom(parsedMessage);
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
|
||||
/**
|
||||
* <pre>
|
||||
* The bytes value.
|
||||
* </pre>
|
||||
*
|
||||
* <code>bytes value = 1;</code>
|
||||
*/
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() {
|
||||
return value_;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* The bytes value.
|
||||
* </pre>
|
||||
*
|
||||
* <code>bytes value = 1;</code>
|
||||
*/
|
||||
public Builder setValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
|
||||
value_ = value;
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* The bytes value.
|
||||
* </pre>
|
||||
*
|
||||
* <code>bytes value = 1;</code>
|
||||
*/
|
||||
public Builder clearValue() {
|
||||
|
||||
value_ = getDefaultInstance().getValue();
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
public final Builder setUnknownFields(
|
||||
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
|
||||
return this;
|
||||
}
|
||||
|
||||
public final Builder mergeUnknownFields(
|
||||
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:google.protobuf.BytesValue)
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:google.protobuf.BytesValue)
|
||||
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue DEFAULT_INSTANCE;
|
||||
static {
|
||||
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue();
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue getDefaultInstance() {
|
||||
return DEFAULT_INSTANCE;
|
||||
}
|
||||
|
||||
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BytesValue>
|
||||
PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<BytesValue>() {
|
||||
public BytesValue parsePartialFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
|
||||
return new BytesValue(input, extensionRegistry);
|
||||
}
|
||||
};
|
||||
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BytesValue> parser() {
|
||||
return PARSER;
|
||||
}
|
||||
|
||||
@java.lang.Override
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<BytesValue> getParserForType() {
|
||||
return PARSER;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue getDefaultInstanceForType() {
|
||||
return DEFAULT_INSTANCE;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@@ -1,18 +0,0 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/wrappers.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface BytesValueOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.BytesValue)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * The bytes value.
   * </pre>
   *
   * <code>bytes value = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue();
}
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,273 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.DoubleList;

import java.util.Arrays;
import java.util.Collection;
import java.util.RandomAccess;

/**
 * An implementation of {@link DoubleList} on top of a primitive array.
 *
 * @author dweis@google.com (Daniel Weis)
 */
final class DoubleArrayList
    extends AbstractProtobufList<Double>
    implements DoubleList, RandomAccess {

  private static final DoubleArrayList EMPTY_LIST = new DoubleArrayList();
  static {
    EMPTY_LIST.makeImmutable();
  }

  public static DoubleArrayList emptyList() {
    return EMPTY_LIST;
  }

  /**
   * The backing store for the list.
   */
  private double[] array;

  /**
   * The size of the list distinct from the length of the array. That is, it is the number of
   * elements set in the list.
   */
  private int size;

  /**
   * Constructs a new mutable {@code DoubleArrayList} with default capacity.
   */
  DoubleArrayList() {
    this(new double[DEFAULT_CAPACITY], 0);
  }

  /**
   * Constructs a new mutable {@code DoubleArrayList}
   * containing the same elements as {@code other}.
   */
  private DoubleArrayList(double[] other, int size) {
    array = other;
    this.size = size;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof DoubleArrayList)) {
      return super.equals(o);
    }
    DoubleArrayList other = (DoubleArrayList) o;
    if (size != other.size) {
      return false;
    }

    final double[] arr = other.array;
    for (int i = 0; i < size; i++) {
      if (array[i] != arr[i]) {
        return false;
      }
    }

    return true;
  }

  @Override
  public int hashCode() {
    int result = 1;
    for (int i = 0; i < size; i++) {
      long bits = Double.doubleToLongBits(array[i]);
      result = (31 * result) + Internal.hashLong(bits);
    }
    return result;
  }

  @Override
  public DoubleList mutableCopyWithCapacity(int capacity) {
    if (capacity < size) {
      throw new IllegalArgumentException();
    }
    return new DoubleArrayList(Arrays.copyOf(array, capacity), size);
  }

  @Override
  public Double get(int index) {
    return getDouble(index);
  }

  @Override
  public double getDouble(int index) {
    ensureIndexInRange(index);
    return array[index];
  }

  @Override
  public int size() {
    return size;
  }

  @Override
  public Double set(int index, Double element) {
    return setDouble(index, element);
  }

  @Override
  public double setDouble(int index, double element) {
    ensureIsMutable();
    ensureIndexInRange(index);
    double previousValue = array[index];
    array[index] = element;
    return previousValue;
  }

  @Override
  public void add(int index, Double element) {
    addDouble(index, element);
  }

  /**
   * Like {@link #add(Double)} but more efficient in that it doesn't box the element.
   */
  @Override
  public void addDouble(double element) {
    addDouble(size, element);
  }

  /**
   * Like {@link #add(int, Double)} but more efficient in that it doesn't box the element.
   */
  private void addDouble(int index, double element) {
    ensureIsMutable();
    if (index < 0 || index > size) {
      throw new IndexOutOfBoundsException(makeOutOfBoundsExceptionMessage(index));
    }

    if (size < array.length) {
      // Shift everything over to make room
      System.arraycopy(array, index, array, index + 1, size - index);
    } else {
      // Resize to 1.5x the size
      int length = ((size * 3) / 2) + 1;
      double[] newArray = new double[length];

      // Copy the first part directly
      System.arraycopy(array, 0, newArray, 0, index);

      // Copy the rest shifted over by one to make room
      System.arraycopy(array, index, newArray, index + 1, size - index);
      array = newArray;
    }

    array[index] = element;
    size++;
    modCount++;
  }

  @Override
  public boolean addAll(Collection<? extends Double> collection) {
    ensureIsMutable();

    if (collection == null) {
      throw new NullPointerException();
    }

    // We specialize when adding another DoubleArrayList to avoid boxing elements.
    if (!(collection instanceof DoubleArrayList)) {
      return super.addAll(collection);
    }

    DoubleArrayList list = (DoubleArrayList) collection;
    if (list.size == 0) {
      return false;
    }

    int overflow = Integer.MAX_VALUE - size;
    if (overflow < list.size) {
      // We can't actually represent a list this large.
      throw new OutOfMemoryError();
    }

    int newSize = size + list.size;
    if (newSize > array.length) {
      array = Arrays.copyOf(array, newSize);
    }

    System.arraycopy(list.array, 0, array, size, list.size);
    size = newSize;
    modCount++;
    return true;
  }

  @Override
  public boolean remove(Object o) {
    ensureIsMutable();
    for (int i = 0; i < size; i++) {
      if (o.equals(array[i])) {
        System.arraycopy(array, i + 1, array, i, size - i);
        size--;
        modCount++;
        return true;
      }
    }
    return false;
  }

  @Override
  public Double remove(int index) {
    ensureIsMutable();
    ensureIndexInRange(index);
    double value = array[index];
    System.arraycopy(array, index + 1, array, index, size - index);
    size--;
    modCount++;
    return value;
  }

  /**
   * Ensures that the provided {@code index} is within the range of {@code [0, size]}. Throws an
   * {@link IndexOutOfBoundsException} if it is not.
   *
   * @param index the index to verify is in range
   */
  private void ensureIndexInRange(int index) {
    if (index < 0 || index >= size) {
      throw new IndexOutOfBoundsException(makeOutOfBoundsExceptionMessage(index));
    }
  }

  private String makeOutOfBoundsExceptionMessage(int index) {
    return "Index:" + index + ", Size:" + size;
  }
}
@@ -1,454 +0,0 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/wrappers.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

/**
 * <pre>
 * Wrapper message for `double`.
 * The JSON representation for `DoubleValue` is JSON number.
 * </pre>
 *
 * Protobuf type {@code google.protobuf.DoubleValue}
 */
public final class DoubleValue extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.protobuf.DoubleValue)
    DoubleValueOrBuilder {
  // Use DoubleValue.newBuilder() to construct.
  private DoubleValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private DoubleValue() {
    value_ = 0D;
  }

  @java.lang.Override
  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  private DoubleValue(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 9: {

            value_ = input.readDouble();
            break;
          }
        }
      }
    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_DoubleValue_descriptor;
  }

  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_DoubleValue_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.Builder.class);
  }

  public static final int VALUE_FIELD_NUMBER = 1;
  private double value_;
  /**
   * <pre>
   * The double value.
   * </pre>
   *
   * <code>double value = 1;</code>
   */
  public double getValue() {
    return value_;
  }

  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    if (value_ != 0D) {
      output.writeDouble(1, value_);
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (value_ != 0D) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeDoubleSize(1, value_);
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue) obj;

    boolean result = true;
    result = result && (
        java.lang.Double.doubleToLongBits(getValue())
        == java.lang.Double.doubleToLongBits(
            other.getValue()));
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + VALUE_FIELD_NUMBER;
    hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
        java.lang.Double.doubleToLongBits(getValue()));
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(byte[] data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(
      byte[] data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Wrapper message for `double`.
   * The JSON representation for `DoubleValue` is JSON number.
   * </pre>
   *
   * Protobuf type {@code google.protobuf.DoubleValue}
   */
  public static final class Builder extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.protobuf.DoubleValue)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValueOrBuilder {
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_DoubleValue_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_DoubleValue_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      value_ = 0D;

      return this;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_DoubleValue_descriptor;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.getDefaultInstance();
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue build() {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue buildPartial() {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue(this);
      result.value_ = value_;
      onBuilt();
      return result;
    }

    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue) {
        return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue other) {
      if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue.getDefaultInstance()) return this;
      if (other.getValue() != 0D) {
        setValue(other.getValue());
      }
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private double value_ ;
    /**
     * <pre>
     * The double value.
     * </pre>
     *
     * <code>double value = 1;</code>
     */
    public double getValue() {
      return value_;
    }
    /**
     * <pre>
     * The double value.
     * </pre>
     *
     * <code>double value = 1;</code>
     */
    public Builder setValue(double value) {

      value_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The double value.
     * </pre>
     *
     * <code>double value = 1;</code>
     */
    public Builder clearValue() {

      value_ = 0D;
      onChanged();
      return this;
    }
    public final Builder setUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    public final Builder mergeUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }


    // @@protoc_insertion_point(builder_scope:google.protobuf.DoubleValue)
  }

  // @@protoc_insertion_point(class_scope:google.protobuf.DoubleValue)
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue();
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DoubleValue>
      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<DoubleValue>() {
    public DoubleValue parsePartialFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return new DoubleValue(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DoubleValue> parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<DoubleValue> getParserForType() {
    return PARSER;
  }

  public org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
@@ -1,18 +0,0 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/wrappers.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface DoubleValueOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.DoubleValue)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * The double value.
   * </pre>
   *
   * <code>double value = 1;</code>
   */
  double getValue();
}
@@ -1,618 +0,0 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/duration.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

/**
 * <pre>
 * A Duration represents a signed, fixed-length span of time represented
 * as a count of seconds and fractions of seconds at nanosecond
 * resolution. It is independent of any calendar and concepts like "day"
 * or "month". It is related to Timestamp in that the difference between
 * two Timestamp values is a Duration and it can be added or subtracted
 * from a Timestamp. Range is approximately +-10,000 years.
 * Example 1: Compute Duration from two Timestamps in pseudo code.
 *     Timestamp start = ...;
 *     Timestamp end = ...;
 *     Duration duration = ...;
 *     duration.seconds = end.seconds - start.seconds;
 *     duration.nanos = end.nanos - start.nanos;
 *     if (duration.seconds < 0 && duration.nanos > 0) {
 *       duration.seconds += 1;
 *       duration.nanos -= 1000000000;
 *     } else if (duration.seconds > 0 && duration.nanos < 0) {
 *       duration.seconds -= 1;
 *       duration.nanos += 1000000000;
 *     }
 * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
 *     Timestamp start = ...;
 *     Duration duration = ...;
 *     Timestamp end = ...;
 *     end.seconds = start.seconds + duration.seconds;
 *     end.nanos = start.nanos + duration.nanos;
 *     if (end.nanos < 0) {
 *       end.seconds -= 1;
 *       end.nanos += 1000000000;
 *     } else if (end.nanos >= 1000000000) {
 *       end.seconds += 1;
 *       end.nanos -= 1000000000;
 *     }
 * Example 3: Compute Duration from datetime.timedelta in Python.
 *     td = datetime.timedelta(days=3, minutes=10)
 *     duration = Duration()
 *     duration.FromTimedelta(td)
 * </pre>
 *
 * Protobuf type {@code google.protobuf.Duration}
 */
public final class Duration extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.protobuf.Duration)
    DurationOrBuilder {
  // Use Duration.newBuilder() to construct.
  private Duration(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private Duration() {
    seconds_ = 0L;
    nanos_ = 0;
  }

  @java.lang.Override
  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  private Duration(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 8: {

            seconds_ = input.readInt64();
            break;
          }
          case 16: {

            nanos_ = input.readInt32();
            break;
          }
        }
      }
    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.DurationProto.internal_static_google_protobuf_Duration_descriptor;
  }

  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.DurationProto.internal_static_google_protobuf_Duration_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.Builder.class);
  }

  public static final int SECONDS_FIELD_NUMBER = 1;
  private long seconds_;
  /**
   * <pre>
   * Signed seconds of the span of time. Must be from -315,576,000,000
   * to +315,576,000,000 inclusive.
   * </pre>
   *
   * <code>int64 seconds = 1;</code>
   */
  public long getSeconds() {
    return seconds_;
  }

  public static final int NANOS_FIELD_NUMBER = 2;
  private int nanos_;
  /**
   * <pre>
   * Signed fractions of a second at nanosecond resolution of the span
   * of time. Durations less than one second are represented with a 0
   * `seconds` field and a positive or negative `nanos` field. For durations
   * of one second or more, a non-zero value for the `nanos` field must be
   * of the same sign as the `seconds` field. Must be from -999,999,999
   * to +999,999,999 inclusive.
   * </pre>
   *
   * <code>int32 nanos = 2;</code>
   */
  public int getNanos() {
    return nanos_;
  }

  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    if (seconds_ != 0L) {
      output.writeInt64(1, seconds_);
    }
    if (nanos_ != 0) {
      output.writeInt32(2, nanos_);
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (seconds_ != 0L) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeInt64Size(1, seconds_);
    }
    if (nanos_ != 0) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, nanos_);
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration) obj;

    boolean result = true;
    result = result && (getSeconds()
        == other.getSeconds());
    result = result && (getNanos()
        == other.getNanos());
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + SECONDS_FIELD_NUMBER;
    hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong(
        getSeconds());
    hash = (37 * hash) + NANOS_FIELD_NUMBER;
    hash = (53 * hash) + getNanos();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(byte[] data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(
      byte[] data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * A Duration represents a signed, fixed-length span of time represented
   * as a count of seconds and fractions of seconds at nanosecond
   * resolution. It is independent of any calendar and concepts like "day"
   * or "month". It is related to Timestamp in that the difference between
   * two Timestamp values is a Duration and it can be added or subtracted
   * from a Timestamp. Range is approximately +-10,000 years.
   * Example 1: Compute Duration from two Timestamps in pseudo code.
   *     Timestamp start = ...;
   *     Timestamp end = ...;
   *     Duration duration = ...;
   *     duration.seconds = end.seconds - start.seconds;
   *     duration.nanos = end.nanos - start.nanos;
   *     if (duration.seconds < 0 && duration.nanos > 0) {
   *       duration.seconds += 1;
   *       duration.nanos -= 1000000000;
   *     } else if (duration.seconds > 0 && duration.nanos < 0) {
   *       duration.seconds -= 1;
   *       duration.nanos += 1000000000;
   *     }
   * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
   *     Timestamp start = ...;
   *     Duration duration = ...;
   *     Timestamp end = ...;
   *     end.seconds = start.seconds + duration.seconds;
   *     end.nanos = start.nanos + duration.nanos;
   *     if (end.nanos < 0) {
   *       end.seconds -= 1;
   *       end.nanos += 1000000000;
   *     } else if (end.nanos >= 1000000000) {
   *       end.seconds += 1;
   *       end.nanos -= 1000000000;
   *     }
   * Example 3: Compute Duration from datetime.timedelta in Python.
   *     td = datetime.timedelta(days=3, minutes=10)
   *     duration = Duration()
   *     duration.FromTimedelta(td)
   * </pre>
   *
   * Protobuf type {@code google.protobuf.Duration}
   */
  public static final class Builder extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.protobuf.Duration)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.DurationOrBuilder {
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DurationProto.internal_static_google_protobuf_Duration_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DurationProto.internal_static_google_protobuf_Duration_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      seconds_ = 0L;

      nanos_ = 0;

      return this;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.DurationProto.internal_static_google_protobuf_Duration_descriptor;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.getDefaultInstance();
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration build() {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration buildPartial() {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration(this);
      result.seconds_ = seconds_;
      result.nanos_ = nanos_;
      onBuilt();
      return result;
    }

    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration) {
        return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration other) {
      if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration.getDefaultInstance()) return this;
      if (other.getSeconds() != 0L) {
        setSeconds(other.getSeconds());
      }
      if (other.getNanos() != 0) {
        setNanos(other.getNanos());
      }
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private long seconds_ ;
    /**
     * <pre>
     * Signed seconds of the span of time. Must be from -315,576,000,000
     * to +315,576,000,000 inclusive.
     * </pre>
     *
     * <code>int64 seconds = 1;</code>
     */
    public long getSeconds() {
      return seconds_;
    }
    /**
     * <pre>
     * Signed seconds of the span of time. Must be from -315,576,000,000
     * to +315,576,000,000 inclusive.
     * </pre>
     *
     * <code>int64 seconds = 1;</code>
     */
    public Builder setSeconds(long value) {

      seconds_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Signed seconds of the span of time. Must be from -315,576,000,000
     * to +315,576,000,000 inclusive.
     * </pre>
     *
     * <code>int64 seconds = 1;</code>
     */
    public Builder clearSeconds() {

      seconds_ = 0L;
      onChanged();
      return this;
    }

    private int nanos_ ;
    /**
     * <pre>
     * Signed fractions of a second at nanosecond resolution of the span
     * of time. Durations less than one second are represented with a 0
     * `seconds` field and a positive or negative `nanos` field. For durations
     * of one second or more, a non-zero value for the `nanos` field must be
     * of the same sign as the `seconds` field. Must be from -999,999,999
     * to +999,999,999 inclusive.
     * </pre>
     *
     * <code>int32 nanos = 2;</code>
     */
    public int getNanos() {
      return nanos_;
    }
    /**
     * <pre>
     * Signed fractions of a second at nanosecond resolution of the span
     * of time. Durations less than one second are represented with a 0
     * `seconds` field and a positive or negative `nanos` field. For durations
     * of one second or more, a non-zero value for the `nanos` field must be
     * of the same sign as the `seconds` field. Must be from -999,999,999
     * to +999,999,999 inclusive.
     * </pre>
     *
     * <code>int32 nanos = 2;</code>
     */
    public Builder setNanos(int value) {

      nanos_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Signed fractions of a second at nanosecond resolution of the span
     * of time. Durations less than one second are represented with a 0
     * `seconds` field and a positive or negative `nanos` field. For durations
* `seconds` field and a positive or negative `nanos` field. For durations
|
||||
* of one second or more, a non-zero value for the `nanos` field must be
|
||||
* of the same sign as the `seconds` field. Must be from -999,999,999
|
||||
* to +999,999,999 inclusive.
|
||||
* </pre>
|
||||
*
|
||||
* <code>int32 nanos = 2;</code>
|
||||
*/
|
||||
public Builder clearNanos() {
|
||||
|
||||
nanos_ = 0;
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
public final Builder setUnknownFields(
|
||||
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
|
||||
return this;
|
||||
}
|
||||
|
||||
public final Builder mergeUnknownFields(
|
||||
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:google.protobuf.Duration)
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:google.protobuf.Duration)
|
||||
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration DEFAULT_INSTANCE;
|
||||
static {
|
||||
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration();
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration getDefaultInstance() {
|
||||
return DEFAULT_INSTANCE;
|
||||
}
|
||||
|
||||
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Duration>
|
||||
PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Duration>() {
|
||||
public Duration parsePartialFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
|
||||
return new Duration(input, extensionRegistry);
|
||||
}
|
||||
};
|
||||
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Duration> parser() {
|
||||
return PARSER;
|
||||
}
|
||||
|
||||
@java.lang.Override
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Duration> getParserForType() {
|
||||
return PARSER;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration getDefaultInstanceForType() {
|
||||
return DEFAULT_INSTANCE;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
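Editor's note: the builder above is used like any protoc-generated message builder. A minimal sketch, assuming the shaded classes from this deleted file are on the classpath (the class name DurationExample is hypothetical, not part of this commit):

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration;

    public class DurationExample {  // hypothetical illustration class
      public static void main(String[] args) {
        // A span of -1.5 seconds: per the javadoc, for durations of one second
        // or more the nanos field must carry the same sign as seconds.
        Duration d = Duration.newBuilder()
            .setSeconds(-1L)
            .setNanos(-500000000)
            .build();
        System.out.println(d.getSeconds() + "s " + d.getNanos() + "ns");
      }
    }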
@@ -1,33 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/duration.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface DurationOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.Duration)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * Signed seconds of the span of time. Must be from -315,576,000,000
   * to +315,576,000,000 inclusive.
   * </pre>
   *
   * <code>int64 seconds = 1;</code>
   */
  long getSeconds();

  /**
   * <pre>
   * Signed fractions of a second at nanosecond resolution of the span
   * of time. Durations less than one second are represented with a 0
   * `seconds` field and a positive or negative `nanos` field. For durations
   * of one second or more, a non-zero value for the `nanos` field must be
   * of the same sign as the `seconds` field. Must be from -999,999,999
   * to +999,999,999 inclusive.
   * </pre>
   *
   * <code>int32 nanos = 2;</code>
   */
  int getNanos();
}
@@ -1,59 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/duration.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public final class DurationProto {
  private DurationProto() {}
  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
  }
  static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_google_protobuf_Duration_descriptor;
  static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_protobuf_Duration_fieldAccessorTable;

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\036google/protobuf/duration.proto\022\017google" +
      ".protobuf\"*\n\010Duration\022\017\n\007seconds\030\001 \001(\003\022\r" +
      "\n\005nanos\030\002 \001(\005B|\n\023com.google.protobufB\rDu" +
      "rationProtoP\001Z*github.com/golang/protobu" +
      "f/ptypes/duration\370\001\001\242\002\003GPB\252\002\036Google.Prot" +
      "obuf.WellKnownTypesb\006proto3"
    };
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.    InternalDescriptorAssigner() {
          public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
              org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            return null;
          }
        };
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
    internal_static_google_protobuf_Duration_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_google_protobuf_Duration_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_google_protobuf_Duration_descriptor,
        new java.lang.String[] { "Seconds", "Nanos", });
  }

  // @@protoc_insertion_point(outer_class_scope)
}
@@ -1,684 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor;

import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * An implementation of {@link Message} that can represent arbitrary types,
 * given a {@link Descriptors.Descriptor}.
 *
 * @author kenton@google.com Kenton Varda
 */
public final class DynamicMessage extends AbstractMessage {
  private final Descriptor type;
  private final FieldSet<FieldDescriptor> fields;
  private final FieldDescriptor[] oneofCases;
  private final UnknownFieldSet unknownFields;
  private int memoizedSize = -1;

  /**
   * Construct a {@code DynamicMessage} using the given {@code FieldSet}.
   * oneofCases stores the FieldDescriptor for each oneof to indicate
   * which field is set. Caller should make sure the array is immutable.
   *
   * This constructor is package private and will be used in
   * {@code DynamicMutableMessage} to convert a mutable message to an immutable
   * message.
   */
  DynamicMessage(Descriptor type, FieldSet<FieldDescriptor> fields,
      FieldDescriptor[] oneofCases,
      UnknownFieldSet unknownFields) {
    this.type = type;
    this.fields = fields;
    this.oneofCases = oneofCases;
    this.unknownFields = unknownFields;
  }

  /**
   * Get a {@code DynamicMessage} representing the default instance of the
   * given type.
   */
  public static DynamicMessage getDefaultInstance(Descriptor type) {
    int oneofDeclCount = type.toProto().getOneofDeclCount();
    FieldDescriptor[] oneofCases = new FieldDescriptor[oneofDeclCount];
    return new DynamicMessage(type, FieldSet.<FieldDescriptor>emptySet(),
        oneofCases,
        UnknownFieldSet.getDefaultInstance());
  }


  /** Parse a message of the given type from the given input stream. */
  public static DynamicMessage parseFrom(Descriptor type,
      CodedInputStream input)
      throws IOException {
    return newBuilder(type).mergeFrom(input).buildParsed();
  }

  /** Parse a message of the given type from the given input stream. */
  public static DynamicMessage parseFrom(
      Descriptor type,
      CodedInputStream input,
      ExtensionRegistry extensionRegistry)
      throws IOException {
    return newBuilder(type).mergeFrom(input, extensionRegistry).buildParsed();
  }

  /** Parse {@code data} as a message of the given type and return it. */
  public static DynamicMessage parseFrom(Descriptor type, ByteString data)
      throws InvalidProtocolBufferException {
    return newBuilder(type).mergeFrom(data).buildParsed();
  }

  /** Parse {@code data} as a message of the given type and return it. */
  public static DynamicMessage parseFrom(Descriptor type, ByteString data,
      ExtensionRegistry extensionRegistry)
      throws InvalidProtocolBufferException {
    return newBuilder(type).mergeFrom(data, extensionRegistry).buildParsed();
  }

  /** Parse {@code data} as a message of the given type and return it. */
  public static DynamicMessage parseFrom(Descriptor type, byte[] data)
      throws InvalidProtocolBufferException {
    return newBuilder(type).mergeFrom(data).buildParsed();
  }

  /** Parse {@code data} as a message of the given type and return it. */
  public static DynamicMessage parseFrom(Descriptor type, byte[] data,
      ExtensionRegistry extensionRegistry)
      throws InvalidProtocolBufferException {
    return newBuilder(type).mergeFrom(data, extensionRegistry).buildParsed();
  }

  /** Parse a message of the given type from {@code input} and return it. */
  public static DynamicMessage parseFrom(Descriptor type, InputStream input)
      throws IOException {
    return newBuilder(type).mergeFrom(input).buildParsed();
  }

  /** Parse a message of the given type from {@code input} and return it. */
  public static DynamicMessage parseFrom(Descriptor type, InputStream input,
      ExtensionRegistry extensionRegistry)
      throws IOException {
    return newBuilder(type).mergeFrom(input, extensionRegistry).buildParsed();
  }

  /** Construct a {@link Message.Builder} for the given type. */
  public static Builder newBuilder(Descriptor type) {
    return new Builder(type);
  }

  /**
   * Construct a {@link Message.Builder} for a message of the same type as
   * {@code prototype}, and initialize it with {@code prototype}'s contents.
   */
  public static Builder newBuilder(Message prototype) {
    return new Builder(prototype.getDescriptorForType()).mergeFrom(prototype);
  }

  // -----------------------------------------------------------------
  // Implementation of Message interface.

  @Override
  public Descriptor getDescriptorForType() {
    return type;
  }

  @Override
  public DynamicMessage getDefaultInstanceForType() {
    return getDefaultInstance(type);
  }

  @Override
  public Map<FieldDescriptor, Object> getAllFields() {
    return fields.getAllFields();
  }

  @Override
  public boolean hasOneof(OneofDescriptor oneof) {
    verifyOneofContainingType(oneof);
    FieldDescriptor field = oneofCases[oneof.getIndex()];
    if (field == null) {
      return false;
    }
    return true;
  }

  @Override
  public FieldDescriptor getOneofFieldDescriptor(OneofDescriptor oneof) {
    verifyOneofContainingType(oneof);
    return oneofCases[oneof.getIndex()];
  }

  @Override
  public boolean hasField(FieldDescriptor field) {
    verifyContainingType(field);
    return fields.hasField(field);
  }

  @Override
  public Object getField(FieldDescriptor field) {
    verifyContainingType(field);
    Object result = fields.getField(field);
    if (result == null) {
      if (field.isRepeated()) {
        result = Collections.emptyList();
      } else if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
        result = getDefaultInstance(field.getMessageType());
      } else {
        result = field.getDefaultValue();
      }
    }
    return result;
  }

  @Override
  public int getRepeatedFieldCount(FieldDescriptor field) {
    verifyContainingType(field);
    return fields.getRepeatedFieldCount(field);
  }

  @Override
  public Object getRepeatedField(FieldDescriptor field, int index) {
    verifyContainingType(field);
    return fields.getRepeatedField(field, index);
  }

  @Override
  public UnknownFieldSet getUnknownFields() {
    return unknownFields;
  }

  static boolean isInitialized(Descriptor type,
                               FieldSet<FieldDescriptor> fields) {
    // Check that all required fields are present.
    for (final FieldDescriptor field : type.getFields()) {
      if (field.isRequired()) {
        if (!fields.hasField(field)) {
          return false;
        }
      }
    }

    // Check that embedded messages are initialized.
    return fields.isInitialized();
  }

  @Override
  public boolean isInitialized() {
    return isInitialized(type, fields);
  }

  @Override
  public void writeTo(CodedOutputStream output) throws IOException {
    if (type.getOptions().getMessageSetWireFormat()) {
      fields.writeMessageSetTo(output);
      unknownFields.writeAsMessageSetTo(output);
    } else {
      fields.writeTo(output);
      unknownFields.writeTo(output);
    }
  }

  @Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    if (type.getOptions().getMessageSetWireFormat()) {
      size = fields.getMessageSetSerializedSize();
      size += unknownFields.getSerializedSizeAsMessageSet();
    } else {
      size = fields.getSerializedSize();
      size += unknownFields.getSerializedSize();
    }

    memoizedSize = size;
    return size;
  }

  @Override
  public Builder newBuilderForType() {
    return new Builder(type);
  }

  @Override
  public Builder toBuilder() {
    return newBuilderForType().mergeFrom(this);
  }

  @Override
  public Parser<DynamicMessage> getParserForType() {
    return new AbstractParser<DynamicMessage>() {
      @Override
      public DynamicMessage parsePartialFrom(
          CodedInputStream input, ExtensionRegistryLite extensionRegistry)
          throws InvalidProtocolBufferException {
        Builder builder = newBuilder(type);
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (IOException e) {
          throw new InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
  }

  /** Verifies that the field is a field of this message. */
  private void verifyContainingType(FieldDescriptor field) {
    if (field.getContainingType() != type) {
      throw new IllegalArgumentException(
          "FieldDescriptor does not match message type.");
    }
  }

  /** Verifies that the oneof is an oneof of this message. */
  private void verifyOneofContainingType(OneofDescriptor oneof) {
    if (oneof.getContainingType() != type) {
      throw new IllegalArgumentException(
          "OneofDescriptor does not match message type.");
    }
  }

  // =================================================================

  /**
   * Builder for {@link DynamicMessage}s.
   */
  public static final class Builder extends AbstractMessage.Builder<Builder> {
    private final Descriptor type;
    private FieldSet<FieldDescriptor> fields;
    private final FieldDescriptor[] oneofCases;
    private UnknownFieldSet unknownFields;

    /** Construct a {@code Builder} for the given type. */
    private Builder(Descriptor type) {
      this.type = type;
      this.fields = FieldSet.newFieldSet();
      this.unknownFields = UnknownFieldSet.getDefaultInstance();
      this.oneofCases = new FieldDescriptor[type.toProto().getOneofDeclCount()];
    }

    // ---------------------------------------------------------------
    // Implementation of Message.Builder interface.

    @Override
    public Builder clear() {
      if (fields.isImmutable()) {
        fields = FieldSet.newFieldSet();
      } else {
        fields.clear();
      }
      unknownFields = UnknownFieldSet.getDefaultInstance();
      return this;
    }

    @Override
    public Builder mergeFrom(Message other) {
      if (other instanceof DynamicMessage) {
        // This should be somewhat faster than calling super.mergeFrom().
        DynamicMessage otherDynamicMessage = (DynamicMessage) other;
        if (otherDynamicMessage.type != type) {
          throw new IllegalArgumentException(
            "mergeFrom(Message) can only merge messages of the same type.");
        }
        ensureIsMutable();
        fields.mergeFrom(otherDynamicMessage.fields);
        mergeUnknownFields(otherDynamicMessage.unknownFields);
        for (int i = 0; i < oneofCases.length; i++) {
          if (oneofCases[i] == null) {
            oneofCases[i] = otherDynamicMessage.oneofCases[i];
          } else {
            if ((otherDynamicMessage.oneofCases[i] != null)
                && (oneofCases[i] != otherDynamicMessage.oneofCases[i])) {
              fields.clearField(oneofCases[i]);
              oneofCases[i] = otherDynamicMessage.oneofCases[i];
            }
          }
        }
        return this;
      } else {
        return super.mergeFrom(other);
      }
    }

    @Override
    public DynamicMessage build() {
      if (!isInitialized()) {
        throw newUninitializedMessageException(
          new DynamicMessage(type, fields,
              java.util.Arrays.copyOf(oneofCases, oneofCases.length), unknownFields));
      }
      return buildPartial();
    }

    /**
     * Helper for DynamicMessage.parseFrom() methods to call. Throws
     * {@link InvalidProtocolBufferException} instead of
     * {@link UninitializedMessageException}.
     */
    private DynamicMessage buildParsed() throws InvalidProtocolBufferException {
      if (!isInitialized()) {
        throw newUninitializedMessageException(
            new DynamicMessage(type, fields,
                java.util.Arrays.copyOf(oneofCases, oneofCases.length), unknownFields))
          .asInvalidProtocolBufferException();
      }
      return buildPartial();
    }

    @Override
    public DynamicMessage buildPartial() {
      fields.makeImmutable();
      DynamicMessage result =
          new DynamicMessage(type, fields,
              java.util.Arrays.copyOf(oneofCases, oneofCases.length), unknownFields);
      return result;
    }

    @Override
    public Builder clone() {
      Builder result = new Builder(type);
      result.fields.mergeFrom(fields);
      result.mergeUnknownFields(unknownFields);
      System.arraycopy(oneofCases, 0, result.oneofCases, 0 , oneofCases.length);
      return result;
    }

    @Override
    public boolean isInitialized() {
      return DynamicMessage.isInitialized(type, fields);
    }

    @Override
    public Descriptor getDescriptorForType() {
      return type;
    }

    @Override
    public DynamicMessage getDefaultInstanceForType() {
      return getDefaultInstance(type);
    }

    @Override
    public Map<FieldDescriptor, Object> getAllFields() {
      return fields.getAllFields();
    }

    @Override
    public Builder newBuilderForField(FieldDescriptor field) {
      verifyContainingType(field);

      if (field.getJavaType() != FieldDescriptor.JavaType.MESSAGE) {
        throw new IllegalArgumentException(
          "newBuilderForField is only valid for fields with message type.");
      }

      return new Builder(field.getMessageType());
    }

    @Override
    public boolean hasOneof(OneofDescriptor oneof) {
      verifyOneofContainingType(oneof);
      FieldDescriptor field = oneofCases[oneof.getIndex()];
      if (field == null) {
        return false;
      }
      return true;
    }

    @Override
    public FieldDescriptor getOneofFieldDescriptor(OneofDescriptor oneof) {
      verifyOneofContainingType(oneof);
      return oneofCases[oneof.getIndex()];
    }

    @Override
    public Builder clearOneof(OneofDescriptor oneof) {
      verifyOneofContainingType(oneof);
      FieldDescriptor field = oneofCases[oneof.getIndex()];
      if (field != null) {
        clearField(field);
      }
      return this;
    }

    @Override
    public boolean hasField(FieldDescriptor field) {
      verifyContainingType(field);
      return fields.hasField(field);
    }

    @Override
    public Object getField(FieldDescriptor field) {
      verifyContainingType(field);
      Object result = fields.getField(field);
      if (result == null) {
        if (field.isRepeated()) {
          result = Collections.emptyList();
        } else if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
          result = getDefaultInstance(field.getMessageType());
        } else {
          result = field.getDefaultValue();
        }
      }
      return result;
    }

    @Override
    public Builder setField(FieldDescriptor field, Object value) {
      verifyContainingType(field);
      ensureIsMutable();
      // TODO(xiaofeng): This check should really be put in FieldSet.setField()
      // where all other such checks are done. However, currently
      // FieldSet.setField() permits Integer value for enum fields probably
      // because of some internal features we support. Should figure it out
      // and move this check to a more appropriate place.
      if (field.getType() == FieldDescriptor.Type.ENUM) {
        ensureEnumValueDescriptor(field, value);
      }
      OneofDescriptor oneofDescriptor = field.getContainingOneof();
      if (oneofDescriptor != null) {
        int index = oneofDescriptor.getIndex();
        FieldDescriptor oldField = oneofCases[index];
        if ((oldField != null) && (oldField != field)) {
          fields.clearField(oldField);
        }
        oneofCases[index] = field;
      } else if (field.getFile().getSyntax() == Descriptors.FileDescriptor.Syntax.PROTO3) {
        if (!field.isRepeated()
            && field.getJavaType() != FieldDescriptor.JavaType.MESSAGE
            && value.equals(field.getDefaultValue())) {
          // In proto3, setting a field to its default value is equivalent to clearing the field.
          fields.clearField(field);
          return this;
        }
      }
      fields.setField(field, value);
      return this;
    }

    @Override
    public Builder clearField(FieldDescriptor field) {
      verifyContainingType(field);
      ensureIsMutable();
      OneofDescriptor oneofDescriptor = field.getContainingOneof();
      if (oneofDescriptor != null) {
        int index = oneofDescriptor.getIndex();
        if (oneofCases[index] == field) {
          oneofCases[index] = null;
        }
      }
      fields.clearField(field);
      return this;
    }

    @Override
    public int getRepeatedFieldCount(FieldDescriptor field) {
      verifyContainingType(field);
      return fields.getRepeatedFieldCount(field);
    }

    @Override
    public Object getRepeatedField(FieldDescriptor field, int index) {
      verifyContainingType(field);
      return fields.getRepeatedField(field, index);
    }

    @Override
    public Builder setRepeatedField(FieldDescriptor field, int index, Object value) {
      verifyContainingType(field);
      ensureIsMutable();
      fields.setRepeatedField(field, index, value);
      return this;
    }

    @Override
    public Builder addRepeatedField(FieldDescriptor field, Object value) {
      verifyContainingType(field);
      ensureIsMutable();
      fields.addRepeatedField(field, value);
      return this;
    }

    @Override
    public UnknownFieldSet getUnknownFields() {
      return unknownFields;
    }

    @Override
    public Builder setUnknownFields(UnknownFieldSet unknownFields) {
      if (getDescriptorForType().getFile().getSyntax()
          == Descriptors.FileDescriptor.Syntax.PROTO3) {
        // Proto3 discards unknown fields.
        return this;
      }
      this.unknownFields = unknownFields;
      return this;
    }

    @Override
    public Builder mergeUnknownFields(UnknownFieldSet unknownFields) {
      if (getDescriptorForType().getFile().getSyntax()
          == Descriptors.FileDescriptor.Syntax.PROTO3) {
        // Proto3 discards unknown fields.
        return this;
      }
      this.unknownFields =
        UnknownFieldSet.newBuilder(this.unknownFields)
                       .mergeFrom(unknownFields)
                       .build();
      return this;
    }

    /** Verifies that the field is a field of this message. */
    private void verifyContainingType(FieldDescriptor field) {
      if (field.getContainingType() != type) {
        throw new IllegalArgumentException(
          "FieldDescriptor does not match message type.");
      }
    }

    /** Verifies that the oneof is an oneof of this message. */
    private void verifyOneofContainingType(OneofDescriptor oneof) {
      if (oneof.getContainingType() != type) {
        throw new IllegalArgumentException(
          "OneofDescriptor does not match message type.");
      }
    }

    /** Verifies that the value is EnumValueDescriptor and matches Enum Type. */
    private void ensureSingularEnumValueDescriptor(
        FieldDescriptor field, Object value) {
      if (value == null) {
        throw new NullPointerException();
      }
      if (!(value instanceof EnumValueDescriptor)) {
        throw new IllegalArgumentException(
          "DynamicMessage should use EnumValueDescriptor to set Enum Value.");
      }
      // TODO(xiaofeng): Re-enable this check after Orgstore is fixed to not
      // set incorrect EnumValueDescriptors.
      // EnumDescriptor fieldType = field.getEnumType();
      // EnumDescriptor fieldValueType = ((EnumValueDescriptor) value).getType();
      // if (fieldType != fieldValueType) {
      //   throw new IllegalArgumentException(String.format(
      //       "EnumDescriptor %s of field doesn't match EnumDescriptor %s of field value",
      //       fieldType.getFullName(), fieldValueType.getFullName()));
      // }
    }

    /** Verifies the value for an enum field. */
    private void ensureEnumValueDescriptor(
        FieldDescriptor field, Object value) {
      if (field.isRepeated()) {
        for (Object item : (List) value) {
          ensureSingularEnumValueDescriptor(field, item);
        }
      } else {
        ensureSingularEnumValueDescriptor(field, value);
      }
    }

    private void ensureIsMutable() {
      if (fields.isImmutable()) {
        fields = fields.clone();
      }
    }

    @Override
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder getFieldBuilder(FieldDescriptor field) {
      // TODO(xiangl): need implementation for dynamic message
      throw new UnsupportedOperationException(
        "getFieldBuilder() called on a dynamic message type.");
    }

    @Override
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder getRepeatedFieldBuilder(FieldDescriptor field,
        int index) {
      throw new UnsupportedOperationException(
        "getRepeatedFieldBuilder() called on a dynamic message type.");
    }
  }
}
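Editor's note: the javadoc above describes DynamicMessage as a descriptor-driven Message implementation. A minimal sketch of the reflective build-and-parse round trip, assuming the shaded Duration class from this same package is available (DynamicMessageExample is a hypothetical class name):

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.DynamicMessage;

    public class DynamicMessageExample {  // hypothetical illustration class
      public static void main(String[] args) throws Exception {
        // Build a google.protobuf.Duration generically, without its generated setters.
        Descriptor type = Duration.getDescriptor();
        FieldDescriptor seconds = type.findFieldByName("seconds");
        DynamicMessage dynamic = DynamicMessage.newBuilder(type)
            .setField(seconds, 30L)
            .build();
        // The wire format is identical, so the generated class parses it back.
        Duration parsed = Duration.parseFrom(dynamic.toByteArray());
        System.out.println(parsed.getSeconds());  // 30
      }
    }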
@@ -1,386 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/empty.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

/**
 * <pre>
 * A generic empty message that you can re-use to avoid defining duplicated
 * empty messages in your APIs. A typical example is to use it as the request
 * or the response type of an API method. For instance:
 *     service Foo {
 *       rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
 *     }
 * The JSON representation for `Empty` is empty JSON object `{}`.
 * </pre>
 *
 * Protobuf type {@code google.protobuf.Empty}
 */
public final class Empty extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.protobuf.Empty)
    EmptyOrBuilder {
  // Use Empty.newBuilder() to construct.
  private Empty(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private Empty() {
  }

  @java.lang.Override
  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  private Empty(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    this();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.EmptyProto.internal_static_google_protobuf_Empty_descriptor;
  }

  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.EmptyProto.internal_static_google_protobuf_Empty_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.Builder.class);
  }

  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty) obj;

    boolean result = true;
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom(byte[] data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom(
      byte[] data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * A generic empty message that you can re-use to avoid defining duplicated
   * empty messages in your APIs. A typical example is to use it as the request
   * or the response type of an API method. For instance:
   *     service Foo {
   *       rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
   *     }
   * The JSON representation for `Empty` is empty JSON object `{}`.
   * </pre>
   *
   * Protobuf type {@code google.protobuf.Empty}
   */
  public static final class Builder extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.protobuf.Empty)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.EmptyOrBuilder {
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.EmptyProto.internal_static_google_protobuf_Empty_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.EmptyProto.internal_static_google_protobuf_Empty_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      return this;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.EmptyProto.internal_static_google_protobuf_Empty_descriptor;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.getDefaultInstance();
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty build() {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty buildPartial() {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty(this);
      onBuilt();
      return result;
    }

    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty) {
        return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty other) {
      if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty.getDefaultInstance()) return this;
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    public final Builder setUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    public final Builder mergeUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }


    // @@protoc_insertion_point(builder_scope:google.protobuf.Empty)
  }

  // @@protoc_insertion_point(class_scope:google.protobuf.Empty)
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty();
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Empty>
      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Empty>() {
    public Empty parsePartialFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return new Empty(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Empty> parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Empty> getParserForType() {
    return PARSER;
  }

  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
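Editor's note: as the javadoc above says, Empty is the conventional zero-field payload for RPC methods that take or return nothing, and it serializes to zero bytes. A small sketch (EmptyExample is a hypothetical class name):

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty;

    public class EmptyExample {  // hypothetical illustration class
      public static void main(String[] args) {
        Empty e = Empty.getDefaultInstance();
        // No fields, so the serialized form is an empty byte array.
        System.out.println(e.toByteArray().length);  // 0
      }
    }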
@@ -1,9 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/empty.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface EmptyOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.Empty)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
}
@@ -1,58 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/empty.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public final class EmptyProto {
  private EmptyProto() {}
  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
  }
  static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_google_protobuf_Empty_descriptor;
  static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_protobuf_Empty_fieldAccessorTable;

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\033google/protobuf/empty.proto\022\017google.pr" +
      "otobuf\"\007\n\005EmptyBv\n\023com.google.protobufB\n" +
      "EmptyProtoP\001Z\'github.com/golang/protobuf" +
      "/ptypes/empty\370\001\001\242\002\003GPB\252\002\036Google.Protobuf" +
      ".WellKnownTypesb\006proto3"
    };
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.    InternalDescriptorAssigner() {
          public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
              org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            return null;
          }
        };
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
    internal_static_google_protobuf_Empty_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_google_protobuf_Empty_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_google_protobuf_Empty_descriptor,
        new java.lang.String[] { });
  }

  // @@protoc_insertion_point(outer_class_scope)
}
File diff suppressed because it is too large.
@@ -1,157 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/type.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface EnumOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.Enum)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * Enum type name.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  java.lang.String getName();
  /**
   * <pre>
   * Enum type name.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getNameBytes();

  /**
   * <pre>
   * Enum value definitions.
   * </pre>
   *
   * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue>
      getEnumvalueList();
  /**
   * <pre>
   * Enum value definitions.
   * </pre>
   *
   * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue getEnumvalue(int index);
  /**
   * <pre>
   * Enum value definitions.
   * </pre>
   *
   * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code>
   */
  int getEnumvalueCount();
  /**
   * <pre>
   * Enum value definitions.
   * </pre>
   *
   * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValueOrBuilder>
      getEnumvalueOrBuilderList();
  /**
   * <pre>
   * Enum value definitions.
   * </pre>
   *
   * <code>repeated .google.protobuf.EnumValue enumvalue = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValueOrBuilder getEnumvalueOrBuilder(
      int index);

  /**
   * <pre>
   * Protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Option>
      getOptionsList();
  /**
   * <pre>
   * Protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.Option getOptions(int index);
  /**
   * <pre>
   * Protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  int getOptionsCount();
  /**
   * <pre>
   * Protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder>
      getOptionsOrBuilderList();
  /**
   * <pre>
   * Protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder getOptionsOrBuilder(
      int index);

  /**
   * <pre>
   * The source context.
   * </pre>
   *
   * <code>.google.protobuf.SourceContext source_context = 4;</code>
   */
  boolean hasSourceContext();
  /**
   * <pre>
   * The source context.
   * </pre>
   *
   * <code>.google.protobuf.SourceContext source_context = 4;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext getSourceContext();
  /**
   * <pre>
   * The source context.
   * </pre>
   *
   * <code>.google.protobuf.SourceContext source_context = 4;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder getSourceContextOrBuilder();

  /**
   * <pre>
   * The source syntax.
   * </pre>
   *
   * <code>.google.protobuf.Syntax syntax = 5;</code>
   */
  int getSyntaxValue();
  /**
   * <pre>
   * The source syntax.
   * </pre>
   *
   * <code>.google.protobuf.Syntax syntax = 5;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax getSyntax();
}
File diff suppressed because it is too large
@@ -1,80 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/type.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface EnumValueOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.EnumValue)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * Enum value name.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  java.lang.String getName();
  /**
   * <pre>
   * Enum value name.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getNameBytes();

  /**
   * <pre>
   * Enum value number.
   * </pre>
   *
   * <code>int32 number = 2;</code>
   */
  int getNumber();

  /**
   * <pre>
   * Protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Option>
      getOptionsList();
  /**
   * <pre>
   * Protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.Option getOptions(int index);
  /**
   * <pre>
   * Protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  int getOptionsCount();
  /**
   * <pre>
   * Protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder>
      getOptionsOrBuilderList();
  /**
   * <pre>
   * Protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 3;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder getOptionsOrBuilder(
      int index);
}
@@ -1,66 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Indicates a public API that can change at any time, and has no guarantee of API stability and
 * backward-compatibility.
 *
 * <p>Usage guidelines:
 * <ol>
 * <li>This annotation is used only on public API. Internal interfaces should not use it.</li>
 * <li>This annotation should only be added to new APIs. Adding it to an existing API is
 * considered API-breaking.</li>
 * <li>Removing this annotation from an API gives it stable status.</li>
 * </ol>
 */
@Retention(RetentionPolicy.SOURCE)
@Target({
    ElementType.ANNOTATION_TYPE,
    ElementType.CONSTRUCTOR,
    ElementType.FIELD,
    ElementType.METHOD,
    ElementType.PACKAGE,
    ElementType.TYPE})
@Documented
public @interface ExperimentalApi {
  /**
   * Context information such as links to discussion thread, tracking issue etc.
   */
  String value() default "";
}
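As a quick illustration of the annotation's intent, a minimal sketch of how a caller would mark an unstable entry point. The class, method, and tracking reference below are hypothetical, invented for illustration only:

import org.apache.hadoop.hbase.shaded.com.google.protobuf.ExperimentalApi;

// Hypothetical class, not part of this tree: marks one public method as
// unstable so callers know it may change without notice.
public final class ScanCodec {
  @ExperimentalApi("tracking issue: HBASE-XXXXX")  // placeholder reference
  public static byte[] encodeDelta(byte[] previous, byte[] current) {
    return current;  // placeholder body for illustration only
  }
}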
@@ -1,86 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

/**
 * Interface that generated extensions implement.
 *
 * @author liujisi@google.com (Jisi Liu)
 */
public abstract class Extension<ContainingType extends MessageLite, Type>
    extends ExtensionLite<ContainingType, Type> {

  /** Returns the descriptor of the extension. */
  public abstract Descriptors.FieldDescriptor getDescriptor();

  /** Returns whether or not this extension is a Lite Extension. */
  @Override
  final boolean isLite() {
    return false;
  }

  // All the methods below are extension implementation details.

  /**
   * The API type that the extension is used for.
   */
  protected enum ExtensionType {
    IMMUTABLE,
    MUTABLE,
    PROTO1,
  }

  protected ExtensionType getExtensionType() {
    // TODO(liujisi): make this abstract after we fix proto1.
    return ExtensionType.IMMUTABLE;
  }

  /**
   * Type of a message extension.
   */
  public enum MessageType {
    PROTO1,
    PROTO2,
  }

  /**
   * If the extension is a message extension (i.e., getLiteType() == MESSAGE),
   * returns the type of the message, otherwise undefined.
   */
  public MessageType getMessageType() {
    return MessageType.PROTO2;
  }

  protected abstract Object fromReflectionType(Object value);
  protected abstract Object singularFromReflectionType(Object value);
  protected abstract Object toReflectionType(Object value);
  protected abstract Object singularToReflectionType(Object value);
}
@@ -1,63 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

/**
 * Lite interface that generated extensions implement.
 * <p>
 * Methods are for use by generated code only. You can hold a reference to
 * extensions using this type name.
 */
public abstract class ExtensionLite<ContainingType extends MessageLite, Type> {

  /** Returns the field number of the extension. */
  public abstract int getNumber();

  /** Returns the type of the field. */
  public abstract WireFormat.FieldType getLiteType();

  /** Returns whether it is a repeated field. */
  public abstract boolean isRepeated();

  /** Returns the default value of the extension field. */
  public abstract Type getDefaultValue();

  /**
   * Returns the default instance of the extension field, if it's a message
   * extension.
   */
  public abstract MessageLite getMessageDefaultInstance();

  /** Returns whether or not this extension is a Lite Extension. */
  boolean isLite() {
    return true;
  }
}
@@ -1,396 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * A table of known extensions, searchable by name or field number. When
 * parsing a protocol message that might have extensions, you must provide
 * an {@code ExtensionRegistry} in which you have registered any extensions
 * that you want to be able to parse. Otherwise, those extensions will just
 * be treated like unknown fields.
 *
 * <p>For example, if you had the {@code .proto} file:
 *
 * <pre>
 *   option java_class = "MyProto";
 *
 *   message Foo {
 *     extensions 1000 to max;
 *   }
 *
 *   extend Foo {
 *     optional int32 bar;
 *   }
 * </pre>
 *
 * Then you might write code like:
 *
 * <pre>
 *   ExtensionRegistry registry = ExtensionRegistry.newInstance();
 *   registry.add(MyProto.bar);
 *   MyProto.Foo message = MyProto.Foo.parseFrom(input, registry);
 * </pre>
 *
 * <p>Background:
 *
 * <p>You might wonder why this is necessary. Two alternatives might come to
 * mind. First, you might imagine a system where generated extensions are
 * automatically registered when their containing classes are loaded. This
 * is a popular technique, but is bad design; among other things, it creates a
 * situation where behavior can change depending on what classes happen to be
 * loaded. It also introduces a security vulnerability, because an
 * unprivileged class could cause its code to be called unexpectedly from a
 * privileged class by registering itself as an extension of the right type.
 *
 * <p>Another option you might consider is lazy parsing: do not parse an
 * extension until it is first requested, at which point the caller must
 * provide a type to use. This introduces a different set of problems. First,
 * it would require a mutex lock any time an extension was accessed, which
 * would be slow. Second, corrupt data would not be detected until first
 * access, at which point it would be much harder to deal with it. Third, it
 * could violate the expectation that message objects are immutable, since the
 * type provided could be any arbitrary message class. An unprivileged user
 * could take advantage of this to inject a mutable object into a message
 * belonging to privileged code and create mischief.
 *
 * @author kenton@google.com Kenton Varda
 */
public class ExtensionRegistry extends ExtensionRegistryLite {
  /** Construct a new, empty instance. */
  public static ExtensionRegistry newInstance() {
    return new ExtensionRegistry();
  }

  /** Get the unmodifiable singleton empty instance. */
  public static ExtensionRegistry getEmptyRegistry() {
    return EMPTY_REGISTRY;
  }


  /** Returns an unmodifiable view of the registry. */
  @Override
  public ExtensionRegistry getUnmodifiable() {
    return new ExtensionRegistry(this);
  }

  /** A (Descriptor, Message) pair, returned by lookup methods. */
  public static final class ExtensionInfo {
    /** The extension's descriptor. */
    public final FieldDescriptor descriptor;

    /**
     * A default instance of the extension's type, if it has a message type.
     * Otherwise, {@code null}.
     */
    public final Message defaultInstance;

    private ExtensionInfo(final FieldDescriptor descriptor) {
      this.descriptor = descriptor;
      defaultInstance = null;
    }
    private ExtensionInfo(final FieldDescriptor descriptor,
                          final Message defaultInstance) {
      this.descriptor = descriptor;
      this.defaultInstance = defaultInstance;
    }
  }

  /**
   * Deprecated. Use {@link #findImmutableExtensionByName(String)} instead.
   */
  public ExtensionInfo findExtensionByName(final String fullName) {
    return findImmutableExtensionByName(fullName);
  }

  /**
   * Find an extension for immutable APIs by fully-qualified field name,
   * in the proto namespace. i.e. {@code result.descriptor.fullName()} will
   * match {@code fullName} if a match is found.
   *
   * @return Information about the extension if found, or {@code null}
   * otherwise.
   */
  public ExtensionInfo findImmutableExtensionByName(final String fullName) {
    return immutableExtensionsByName.get(fullName);
  }

  /**
   * Find an extension for mutable APIs by fully-qualified field name,
   * in the proto namespace. i.e. {@code result.descriptor.fullName()} will
   * match {@code fullName} if a match is found.
   *
   * @return Information about the extension if found, or {@code null}
   * otherwise.
   */
  public ExtensionInfo findMutableExtensionByName(final String fullName) {
    return mutableExtensionsByName.get(fullName);
  }

  /**
   * Deprecated. Use {@link #findImmutableExtensionByNumber(
   * Descriptors.Descriptor, int)}
   */
  public ExtensionInfo findExtensionByNumber(
      final Descriptor containingType, final int fieldNumber) {
    return findImmutableExtensionByNumber(containingType, fieldNumber);
  }

  /**
   * Find an extension by containing type and field number for immutable APIs.
   *
   * @return Information about the extension if found, or {@code null}
   * otherwise.
   */
  public ExtensionInfo findImmutableExtensionByNumber(
      final Descriptor containingType, final int fieldNumber) {
    return immutableExtensionsByNumber.get(
      new DescriptorIntPair(containingType, fieldNumber));
  }

  /**
   * Find an extension by containing type and field number for mutable APIs.
   *
   * @return Information about the extension if found, or {@code null}
   * otherwise.
   */
  public ExtensionInfo findMutableExtensionByNumber(
      final Descriptor containingType, final int fieldNumber) {
    return mutableExtensionsByNumber.get(
        new DescriptorIntPair(containingType, fieldNumber));
  }

  /**
   * Find all extensions for mutable APIs by fully-qualified name of
   * extended class. Note that this method is more computationally expensive
   * than getting a single extension by name or number.
   *
   * @return Information about the extensions found, or {@code null} if there
   * are none.
   */
  public Set<ExtensionInfo> getAllMutableExtensionsByExtendedType(final String fullName) {
    HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>();
    for (DescriptorIntPair pair : mutableExtensionsByNumber.keySet()) {
      if (pair.descriptor.getFullName().equals(fullName)) {
        extensions.add(mutableExtensionsByNumber.get(pair));
      }
    }
    return extensions;
  }

  /**
   * Find all extensions for immutable APIs by fully-qualified name of
   * extended class. Note that this method is more computationally expensive
   * than getting a single extension by name or number.
   *
   * @return Information about the extensions found, or {@code null} if there
   * are none.
   */
  public Set<ExtensionInfo> getAllImmutableExtensionsByExtendedType(final String fullName) {
    HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>();
    for (DescriptorIntPair pair : immutableExtensionsByNumber.keySet()) {
      if (pair.descriptor.getFullName().equals(fullName)) {
        extensions.add(immutableExtensionsByNumber.get(pair));
      }
    }
    return extensions;
  }

  /** Add an extension from a generated file to the registry. */
  public void add(final Extension<?, ?> extension) {
    if (extension.getExtensionType() != Extension.ExtensionType.IMMUTABLE &&
        extension.getExtensionType() != Extension.ExtensionType.MUTABLE) {
      // do not support other extension types. ignore
      return;
    }
    add(newExtensionInfo(extension), extension.getExtensionType());
  }

  /** Add an extension from a generated file to the registry. */
  public void add(final GeneratedMessage.GeneratedExtension<?, ?> extension) {
    add((Extension<?, ?>) extension);
  }

  static ExtensionInfo newExtensionInfo(final Extension<?, ?> extension) {
    if (extension.getDescriptor().getJavaType() ==
        FieldDescriptor.JavaType.MESSAGE) {
      if (extension.getMessageDefaultInstance() == null) {
        throw new IllegalStateException(
            "Registered message-type extension had null default instance: " +
            extension.getDescriptor().getFullName());
      }
      return new ExtensionInfo(extension.getDescriptor(),
          (Message) extension.getMessageDefaultInstance());
    } else {
      return new ExtensionInfo(extension.getDescriptor(), null);
    }
  }

  /** Add a non-message-type extension to the registry by descriptor. */
  public void add(final FieldDescriptor type) {
    if (type.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
      throw new IllegalArgumentException(
        "ExtensionRegistry.add() must be provided a default instance when " +
        "adding an embedded message extension.");
    }
    ExtensionInfo info = new ExtensionInfo(type, null);
    add(info, Extension.ExtensionType.IMMUTABLE);
    add(info, Extension.ExtensionType.MUTABLE);
  }

  /** Add a message-type extension to the registry by descriptor. */
  public void add(final FieldDescriptor type, final Message defaultInstance) {
    if (type.getJavaType() != FieldDescriptor.JavaType.MESSAGE) {
      throw new IllegalArgumentException(
        "ExtensionRegistry.add() provided a default instance for a " +
        "non-message extension.");
    }
    add(new ExtensionInfo(type, defaultInstance),
        Extension.ExtensionType.IMMUTABLE);
  }

  // =================================================================
  // Private stuff.

  private ExtensionRegistry() {
    this.immutableExtensionsByName = new HashMap<String, ExtensionInfo>();
    this.mutableExtensionsByName = new HashMap<String, ExtensionInfo>();
    this.immutableExtensionsByNumber =
        new HashMap<DescriptorIntPair, ExtensionInfo>();
    this.mutableExtensionsByNumber =
        new HashMap<DescriptorIntPair, ExtensionInfo>();
  }

  private ExtensionRegistry(ExtensionRegistry other) {
    super(other);
    this.immutableExtensionsByName =
        Collections.unmodifiableMap(other.immutableExtensionsByName);
    this.mutableExtensionsByName =
        Collections.unmodifiableMap(other.mutableExtensionsByName);
    this.immutableExtensionsByNumber =
        Collections.unmodifiableMap(other.immutableExtensionsByNumber);
    this.mutableExtensionsByNumber =
        Collections.unmodifiableMap(other.mutableExtensionsByNumber);
  }

  private final Map<String, ExtensionInfo> immutableExtensionsByName;
  private final Map<String, ExtensionInfo> mutableExtensionsByName;
  private final Map<DescriptorIntPair, ExtensionInfo> immutableExtensionsByNumber;
  private final Map<DescriptorIntPair, ExtensionInfo> mutableExtensionsByNumber;

  ExtensionRegistry(boolean empty) {
    super(EMPTY_REGISTRY_LITE);
    this.immutableExtensionsByName =
        Collections.<String, ExtensionInfo>emptyMap();
    this.mutableExtensionsByName =
        Collections.<String, ExtensionInfo>emptyMap();
    this.immutableExtensionsByNumber =
        Collections.<DescriptorIntPair, ExtensionInfo>emptyMap();
    this.mutableExtensionsByNumber =
        Collections.<DescriptorIntPair, ExtensionInfo>emptyMap();
  }
  static final ExtensionRegistry EMPTY_REGISTRY = new ExtensionRegistry(true);

  private void add(
      final ExtensionInfo extension,
      final Extension.ExtensionType extensionType) {
    if (!extension.descriptor.isExtension()) {
      throw new IllegalArgumentException(
          "ExtensionRegistry.add() was given a FieldDescriptor for a regular " +
          "(non-extension) field.");
    }

    Map<String, ExtensionInfo> extensionsByName;
    Map<DescriptorIntPair, ExtensionInfo> extensionsByNumber;
    switch (extensionType) {
      case IMMUTABLE:
        extensionsByName = immutableExtensionsByName;
        extensionsByNumber = immutableExtensionsByNumber;
        break;
      case MUTABLE:
        extensionsByName = mutableExtensionsByName;
        extensionsByNumber = mutableExtensionsByNumber;
        break;
      default:
        // Ignore the unknown supported type.
        return;
    }

    extensionsByName.put(extension.descriptor.getFullName(), extension);
    extensionsByNumber.put(
      new DescriptorIntPair(extension.descriptor.getContainingType(),
                            extension.descriptor.getNumber()),
      extension);

    final FieldDescriptor field = extension.descriptor;
    if (field.getContainingType().getOptions().getMessageSetWireFormat() &&
        field.getType() == FieldDescriptor.Type.MESSAGE &&
        field.isOptional() &&
        field.getExtensionScope() == field.getMessageType()) {
      // This is an extension of a MessageSet type defined within the extension
      // type's own scope. For backwards-compatibility, allow it to be looked
      // up by type name.
      extensionsByName.put(field.getMessageType().getFullName(), extension);
    }
  }

  /** A (GenericDescriptor, int) pair, used as a map key. */
  private static final class DescriptorIntPair {
    private final Descriptor descriptor;
    private final int number;

    DescriptorIntPair(final Descriptor descriptor, final int number) {
      this.descriptor = descriptor;
      this.number = number;
    }

    @Override
    public int hashCode() {
      return descriptor.hashCode() * ((1 << 16) - 1) + number;
    }
    @Override
    public boolean equals(final Object obj) {
      if (!(obj instanceof DescriptorIntPair)) {
        return false;
      }
      final DescriptorIntPair other = (DescriptorIntPair)obj;
      return descriptor == other.descriptor && number == other.number;
    }
  }
}
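For orientation while reading the removed registry above: the javadoc's registration pattern, spelled out against the shaded API. A minimal sketch only; `FooProtos` and its `Foo` message and `bar` extension are hypothetical generated names used for illustration, not classes in this tree:

import org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry;

public final class RegistryUsageSketch {
  public static void main(String[] args) throws Exception {
    ExtensionRegistry registry = ExtensionRegistry.newInstance();
    // Register before parsing; otherwise the extension comes back as an
    // unknown field. FooProtos.bar is a hypothetical generated extension.
    registry.add(FooProtos.bar);
    byte[] wire = FooProtos.Foo.newBuilder()
        .setExtension(FooProtos.bar, 42).build().toByteArray();
    FooProtos.Foo parsed = FooProtos.Foo.parseFrom(wire, registry);
    System.out.println(parsed.getExtension(FooProtos.bar));  // prints 42
  }
}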
@@ -1,95 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import static org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite.EMPTY_REGISTRY_LITE;

/**
 * A factory object to create instances of {@link ExtensionRegistryLite}.
 *
 * <p>
 * This factory detects (via reflection) if the full (non-Lite) protocol buffer libraries
 * are available, and if so, the instances returned are actually {@link ExtensionRegistry}.
 */
final class ExtensionRegistryFactory {

  static final String FULL_REGISTRY_CLASS_NAME = "org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry";

  /* Visible for Testing
     @Nullable */
  static final Class<?> EXTENSION_REGISTRY_CLASS = reflectExtensionRegistry();

  /* @Nullable */
  static Class<?> reflectExtensionRegistry() {
    try {
      return Class.forName(FULL_REGISTRY_CLASS_NAME);
    } catch (ClassNotFoundException e) {
      // The exception allocation is potentially expensive on Android (where it can be triggered
      // many times at start up). Is there a way to ameliorate this?
      return null;
    }
  }

  /** Construct a new, empty instance. */
  public static ExtensionRegistryLite create() {
    if (EXTENSION_REGISTRY_CLASS != null) {
      try {
        return invokeSubclassFactory("newInstance");
      } catch (Exception e) {
        // return a Lite registry.
      }
    }
    return new ExtensionRegistryLite();
  }

  /** Get the unmodifiable singleton empty instance. */
  public static ExtensionRegistryLite createEmpty() {
    if (EXTENSION_REGISTRY_CLASS != null) {
      try {
        return invokeSubclassFactory("getEmptyRegistry");
      } catch (Exception e) {
        // return a Lite registry.
      }
    }
    return EMPTY_REGISTRY_LITE;
  }

  static boolean isFullRegistry(ExtensionRegistryLite registry) {
    return EXTENSION_REGISTRY_CLASS != null
        && EXTENSION_REGISTRY_CLASS.isAssignableFrom(registry.getClass());
  }

  private static final ExtensionRegistryLite invokeSubclassFactory(String methodName)
      throws Exception {
    return (ExtensionRegistryLite) EXTENSION_REGISTRY_CLASS
        .getMethod(methodName).invoke(null);
  }
}
@@ -1,227 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * Equivalent to {@link ExtensionRegistry} but supports only "lite" types.
 * <p>
 * If all of your types are lite types, then you only need to use
 * {@code ExtensionRegistryLite}. Similarly, if all your types are regular
 * types, then you only need {@link ExtensionRegistry}. Typically it does not
 * make sense to mix the two, since if you have any regular types in your
 * program, you then require the full runtime and lose all the benefits of
 * the lite runtime, so you might as well make all your types be regular types.
 * However, in some cases (e.g. when depending on multiple third-party libraries
 * where one uses lite types and one uses regular), you may find yourself
 * wanting to mix the two. In this case things get more complicated.
 * <p>
 * There are three factors to consider: Whether the type being extended is
 * lite, whether the embedded type (in the case of a message-typed extension)
 * is lite, and whether the extension itself is lite. Since all three are
 * declared in different files, they could all be different. Here are all
 * the combinations and which type of registry to use:
 * <pre>
 *   Extended type   Inner type   Extension   Use registry
 *   =======================================================================
 *   lite            lite         lite        ExtensionRegistryLite
 *   lite            regular      lite        ExtensionRegistry
 *   regular         regular      regular     ExtensionRegistry
 *   all other combinations                   not supported
 * </pre>
 * <p>
 * Note that just as regular types are not allowed to contain lite-type fields,
 * they are also not allowed to contain lite-type extensions. This is because
 * regular types must be fully accessible via reflection, which in turn means
 * that all the inner messages must also support reflection. On the other hand,
 * since regular types implement the entire lite interface, there is no problem
 * with embedding regular types inside lite types.
 *
 * @author kenton@google.com Kenton Varda
 */
public class ExtensionRegistryLite {

  // Set true to enable lazy parsing feature for MessageSet.
  //
  // TODO(xiangl): Now we use a global flag to control whether enable lazy
  // parsing feature for MessageSet, which may be too crude for some
  // applications. Need to support this feature on smaller granularity.
  private static volatile boolean eagerlyParseMessageSets = false;

  // Visible for testing.
  static final String EXTENSION_CLASS_NAME = "org.apache.hadoop.hbase.shaded.com.google.protobuf.Extension";

  /* @Nullable */
  static Class<?> resolveExtensionClass() {
    try {
      return Class.forName(EXTENSION_CLASS_NAME);
    } catch (ClassNotFoundException e) {
      // See comment in ExtensionRegistryFactory on the potential expense of this.
      return null;
    }
  }

  /* @Nullable */
  private static final Class<?> extensionClass = resolveExtensionClass();

  public static boolean isEagerlyParseMessageSets() {
    return eagerlyParseMessageSets;
  }

  public static void setEagerlyParseMessageSets(boolean isEagerlyParse) {
    eagerlyParseMessageSets = isEagerlyParse;
  }

  /**
   * Construct a new, empty instance.
   *
   * <p>This may be an {@code ExtensionRegistry} if the full (non-Lite) proto libraries are
   * available.
   */
  public static ExtensionRegistryLite newInstance() {
    return ExtensionRegistryFactory.create();
  }

  /**
   * Get the unmodifiable singleton empty instance of either ExtensionRegistryLite or
   * {@code ExtensionRegistry} (if the full (non-Lite) proto libraries are available).
   */
  public static ExtensionRegistryLite getEmptyRegistry() {
    return ExtensionRegistryFactory.createEmpty();
  }


  /** Returns an unmodifiable view of the registry. */
  public ExtensionRegistryLite getUnmodifiable() {
    return new ExtensionRegistryLite(this);
  }

  /**
   * Find an extension by containing type and field number.
   *
   * @return Information about the extension if found, or {@code null}
   * otherwise.
   */
  @SuppressWarnings("unchecked")
  public <ContainingType extends MessageLite>
      GeneratedMessageLite.GeneratedExtension<ContainingType, ?>
        findLiteExtensionByNumber(
          final ContainingType containingTypeDefaultInstance,
          final int fieldNumber) {
    return (GeneratedMessageLite.GeneratedExtension<ContainingType, ?>)
        extensionsByNumber.get(
          new ObjectIntPair(containingTypeDefaultInstance, fieldNumber));
  }

  /** Add an extension from a lite generated file to the registry. */
  public final void add(
      final GeneratedMessageLite.GeneratedExtension<?, ?> extension) {
    extensionsByNumber.put(
      new ObjectIntPair(extension.getContainingTypeDefaultInstance(),
                        extension.getNumber()),
      extension);
  }

  /**
   * Add an extension from a lite generated file to the registry only if it is
   * a non-lite extension i.e. {@link GeneratedMessageLite.GeneratedExtension}. */
  public final void add(ExtensionLite<?, ?> extension) {
    if (GeneratedMessageLite.GeneratedExtension.class.isAssignableFrom(extension.getClass())) {
      add((GeneratedMessageLite.GeneratedExtension<?, ?>) extension);
    }
    if (ExtensionRegistryFactory.isFullRegistry(this)) {
      try {
        this.getClass().getMethod("add", extensionClass).invoke(this, extension);
      } catch (Exception e) {
        throw new IllegalArgumentException(
            String.format("Could not invoke ExtensionRegistry#add for %s", extension), e);
      }
    }
  }

  // =================================================================
  // Private stuff.

  // Constructors are package-private so that ExtensionRegistry can subclass
  // this.

  ExtensionRegistryLite() {
    this.extensionsByNumber =
        new HashMap<ObjectIntPair,
                    GeneratedMessageLite.GeneratedExtension<?, ?>>();
  }
  static final ExtensionRegistryLite EMPTY_REGISTRY_LITE =
      new ExtensionRegistryLite(true);

  ExtensionRegistryLite(ExtensionRegistryLite other) {
    if (other == EMPTY_REGISTRY_LITE) {
      this.extensionsByNumber = Collections.emptyMap();
    } else {
      this.extensionsByNumber =
          Collections.unmodifiableMap(other.extensionsByNumber);
    }
  }

  private final Map<ObjectIntPair,
                    GeneratedMessageLite.GeneratedExtension<?, ?>>
      extensionsByNumber;

  ExtensionRegistryLite(boolean empty) {
    this.extensionsByNumber = Collections.emptyMap();
  }

  /** A (Object, int) pair, used as a map key. */
  private static final class ObjectIntPair {
    private final Object object;
    private final int number;

    ObjectIntPair(final Object object, final int number) {
      this.object = object;
      this.number = number;
    }

    @Override
    public int hashCode() {
      return System.identityHashCode(object) * ((1 << 16) - 1) + number;
    }
    @Override
    public boolean equals(final Object obj) {
      if (!(obj instanceof ObjectIntPair)) {
        return false;
      }
      final ObjectIntPair other = (ObjectIntPair)obj;
      return object == other.object && number == other.number;
    }
  }
}
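Worth noting while reviewing the lite/full split above: `ExtensionRegistryLite.newInstance()` delegates to `ExtensionRegistryFactory`, which reflects for the full `ExtensionRegistry` class and upgrades transparently when it is on the classpath. A minimal probe of that behavior, assuming both classes are present as they are in this module:

import org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite;

public final class RegistryFactoryProbe {
  public static void main(String[] args) {
    // With the full runtime present, the factory hands back the reflective subclass.
    ExtensionRegistryLite r = ExtensionRegistryLite.newInstance();
    System.out.println(r instanceof ExtensionRegistry);   // expected: true here
    // The empty registry is a shared unmodifiable singleton.
    System.out.println(ExtensionRegistryLite.getEmptyRegistry()
        == ExtensionRegistryLite.getEmptyRegistry());     // expected: true
  }
}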
File diff suppressed because it is too large
@@ -1,903 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/field_mask.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

/**
 * <pre>
 * `FieldMask` represents a set of symbolic field paths, for example:
 *     paths: "f.a"
 *     paths: "f.b.d"
 * Here `f` represents a field in some root message, `a` and `b`
 * fields in the message found in `f`, and `d` a field found in the
 * message in `f.b`.
 * Field masks are used to specify a subset of fields that should be
 * returned by a get operation or modified by an update operation.
 * Field masks also have a custom JSON encoding (see below).
 * # Field Masks in Projections
 * When used in the context of a projection, a response message or
 * sub-message is filtered by the API to only contain those fields as
 * specified in the mask. For example, if the mask in the previous
 * example is applied to a response message as follows:
 *     f {
 *       a : 22
 *       b {
 *         d : 1
 *         x : 2
 *       }
 *       y : 13
 *     }
 *     z: 8
 * The result will not contain specific values for fields x,y and z
 * (their value will be set to the default, and omitted in proto text
 * output):
 *     f {
 *       a : 22
 *       b {
 *         d : 1
 *       }
 *     }
 * A repeated field is not allowed except at the last position of a
 * paths string.
 * If a FieldMask object is not present in a get operation, the
 * operation applies to all fields (as if a FieldMask of all fields
 * had been specified).
 * Note that a field mask does not necessarily apply to the
 * top-level response message. In case of a REST get operation, the
 * field mask applies directly to the response, but in case of a REST
 * list operation, the mask instead applies to each individual message
 * in the returned resource list. In case of a REST custom method,
 * other definitions may be used. Where the mask applies will be
 * clearly documented together with its declaration in the API. In
 * any case, the effect on the returned resource/resources is required
 * behavior for APIs.
 * # Field Masks in Update Operations
 * A field mask in update operations specifies which fields of the
 * targeted resource are going to be updated. The API is required
 * to only change the values of the fields as specified in the mask
 * and leave the others untouched. If a resource is passed in to
 * describe the updated values, the API ignores the values of all
 * fields not covered by the mask.
 * If a repeated field is specified for an update operation, the existing
 * repeated values in the target resource will be overwritten by the new values.
 * Note that a repeated field is only allowed in the last position of a `paths`
 * string.
 * If a sub-message is specified in the last position of the field mask for an
 * update operation, then the existing sub-message in the target resource is
 * overwritten. Given the target message:
 *     f {
 *       b {
 *         d : 1
 *         x : 2
 *       }
 *       c : 1
 *     }
 * And an update message:
 *     f {
 *       b {
 *         d : 10
 *       }
 *     }
 * then if the field mask is:
 *  paths: "f.b"
 * then the result will be:
 *     f {
 *       b {
 *         d : 10
 *       }
 *       c : 1
 *     }
 * However, if the update mask was:
 *  paths: "f.b.d"
 * then the result would be:
 *     f {
 *       b {
 *         d : 10
 *         x : 2
 *       }
 *       c : 1
 *     }
 * In order to reset a field's value to the default, the field must
 * be in the mask and set to the default value in the provided resource.
 * Hence, in order to reset all fields of a resource, provide a default
 * instance of the resource and set all fields in the mask, or do
 * not provide a mask as described below.
 * If a field mask is not present on update, the operation applies to
 * all fields (as if a field mask of all fields has been specified).
 * Note that in the presence of schema evolution, this may mean that
 * fields the client does not know and has therefore not filled into
 * the request will be reset to their default. If this is unwanted
 * behavior, a specific service may require a client to always specify
 * a field mask, producing an error if not.
 * As with get operations, the location of the resource which
 * describes the updated values in the request message depends on the
 * operation kind. In any case, the effect of the field mask is
 * required to be honored by the API.
 * ## Considerations for HTTP REST
 * The HTTP kind of an update operation which uses a field mask must
 * be set to PATCH instead of PUT in order to satisfy HTTP semantics
 * (PUT must only be used for full updates).
 * # JSON Encoding of Field Masks
 * In JSON, a field mask is encoded as a single string where paths are
 * separated by a comma. Fields name in each path are converted
 * to/from lower-camel naming conventions.
 * As an example, consider the following message declarations:
 *     message Profile {
 *       User user = 1;
 *       Photo photo = 2;
 *     }
 *     message User {
 *       string display_name = 1;
 *       string address = 2;
 *     }
 * In proto a field mask for `Profile` may look as such:
 *     mask {
 *       paths: "user.display_name"
 *       paths: "photo"
 *     }
 * In JSON, the same mask is represented as below:
 *     {
 *       mask: "user.displayName,photo"
 *     }
 * # Field Masks and Oneof Fields
 * Field masks treat fields in oneofs just as regular fields. Consider the
 * following message:
 *     message SampleMessage {
 *       oneof test_oneof {
 *         string name = 4;
 *         SubMessage sub_message = 9;
 *       }
 *     }
 * The field mask can be:
 *     mask {
 *       paths: "name"
 *     }
 * Or:
 *     mask {
 *       paths: "sub_message"
 *     }
 * Note that oneof type names ("test_oneof" in this case) cannot be used in
 * paths.
 * </pre>
 *
 * Protobuf type {@code google.protobuf.FieldMask}
 */
public  final class FieldMask extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.protobuf.FieldMask)
    FieldMaskOrBuilder {
  // Use FieldMask.newBuilder() to construct.
  private FieldMask(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private FieldMask() {
    paths_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
  }

  @java.lang.Override
  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  private FieldMask(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            java.lang.String s = input.readStringRequireUtf8();
            if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
              paths_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList();
              mutable_bitField0_ |= 0x00000001;
            }
            paths_.add(s);
            break;
          }
        }
      }
    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
        paths_ = paths_.getUnmodifiableView();
      }
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMaskProto.internal_static_google_protobuf_FieldMask_descriptor;
  }

  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMaskProto.internal_static_google_protobuf_FieldMask_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask.Builder.class);
  }

  public static final int PATHS_FIELD_NUMBER = 1;
  private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList paths_;
  /**
   * <pre>
   * The set of field mask paths.
   * </pre>
   *
   * <code>repeated string paths = 1;</code>
   */
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList
      getPathsList() {
    return paths_;
  }
  /**
   * <pre>
   * The set of field mask paths.
   * </pre>
   *
   * <code>repeated string paths = 1;</code>
   */
  public int getPathsCount() {
    return paths_.size();
  }
  /**
   * <pre>
   * The set of field mask paths.
   * </pre>
   *
   * <code>repeated string paths = 1;</code>
   */
  public java.lang.String getPaths(int index) {
    return paths_.get(index);
  }
  /**
   * <pre>
   * The set of field mask paths.
   * </pre>
   *
   * <code>repeated string paths = 1;</code>
   */
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getPathsBytes(int index) {
    return paths_.getByteString(index);
  }

  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    for (int i = 0; i < paths_.size(); i++) {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, paths_.getRaw(i));
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    {
      int dataSize = 0;
      for (int i = 0; i < paths_.size(); i++) {
        dataSize += computeStringSizeNoTag(paths_.getRaw(i));
      }
      size += dataSize;
      size += 1 * getPathsList().size();
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask) obj;

    boolean result = true;
    result = result && getPathsList()
        .equals(other.getPathsList());
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getPathsCount() > 0) {
      hash = (37 * hash) + PATHS_FIELD_NUMBER;
      hash = (53 * hash) + getPathsList().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parseFrom(byte[] data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parseFrom(
|
||||
byte[] data,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data, extensionRegistry);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.parseWithIOException(PARSER, input);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parseFrom(
|
||||
java.io.InputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.parseWithIOException(PARSER, input, extensionRegistry);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.parseDelimitedWithIOException(PARSER, input);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parseFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.parseWithIOException(PARSER, input);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parseFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.parseWithIOException(PARSER, input, extensionRegistry);
|
||||
}
|
||||
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder() {
|
||||
return DEFAULT_INSTANCE.toBuilder();
|
||||
}
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask prototype) {
|
||||
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() {
|
||||
return this == DEFAULT_INSTANCE
|
||||
? new Builder() : new Builder().mergeFrom(this);
|
||||
}
|
||||
|
||||
@java.lang.Override
|
||||
protected Builder newBuilderForType(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
|
||||
Builder builder = new Builder(parent);
|
||||
return builder;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* `FieldMask` represents a set of symbolic field paths, for example:
|
||||
* paths: "f.a"
|
||||
* paths: "f.b.d"
|
||||
* Here `f` represents a field in some root message, `a` and `b`
|
||||
* fields in the message found in `f`, and `d` a field found in the
|
||||
* message in `f.b`.
|
||||
* Field masks are used to specify a subset of fields that should be
|
||||
* returned by a get operation or modified by an update operation.
|
||||
* Field masks also have a custom JSON encoding (see below).
|
||||
* # Field Masks in Projections
|
||||
* When used in the context of a projection, a response message or
|
||||
* sub-message is filtered by the API to only contain those fields as
|
||||
* specified in the mask. For example, if the mask in the previous
|
||||
* example is applied to a response message as follows:
|
||||
* f {
|
||||
* a : 22
|
||||
* b {
|
||||
* d : 1
|
||||
* x : 2
|
||||
* }
|
||||
* y : 13
|
||||
* }
|
||||
* z: 8
|
||||
* The result will not contain specific values for fields x,y and z
|
||||
* (their value will be set to the default, and omitted in proto text
|
||||
* output):
|
||||
* f {
|
||||
* a : 22
|
||||
* b {
|
||||
* d : 1
|
||||
* }
|
||||
* }
|
||||
* A repeated field is not allowed except at the last position of a
|
||||
* paths string.
|
||||
* If a FieldMask object is not present in a get operation, the
|
||||
* operation applies to all fields (as if a FieldMask of all fields
|
||||
* had been specified).
|
||||
* Note that a field mask does not necessarily apply to the
|
||||
* top-level response message. In case of a REST get operation, the
|
||||
* field mask applies directly to the response, but in case of a REST
|
||||
* list operation, the mask instead applies to each individual message
|
||||
* in the returned resource list. In case of a REST custom method,
|
||||
* other definitions may be used. Where the mask applies will be
|
||||
* clearly documented together with its declaration in the API. In
|
||||
* any case, the effect on the returned resource/resources is required
|
||||
* behavior for APIs.
|
||||
* # Field Masks in Update Operations
|
||||
* A field mask in update operations specifies which fields of the
|
||||
* targeted resource are going to be updated. The API is required
|
||||
* to only change the values of the fields as specified in the mask
|
||||
* and leave the others untouched. If a resource is passed in to
|
||||
* describe the updated values, the API ignores the values of all
|
||||
* fields not covered by the mask.
|
||||
* If a repeated field is specified for an update operation, the existing
|
||||
* repeated values in the target resource will be overwritten by the new values.
|
||||
* Note that a repeated field is only allowed in the last position of a `paths`
|
||||
* string.
|
||||
* If a sub-message is specified in the last position of the field mask for an
|
||||
* update operation, then the existing sub-message in the target resource is
|
||||
* overwritten. Given the target message:
|
||||
* f {
|
||||
* b {
|
||||
* d : 1
|
||||
* x : 2
|
||||
* }
|
||||
* c : 1
|
||||
* }
|
||||
* And an update message:
|
||||
* f {
|
||||
* b {
|
||||
* d : 10
|
||||
* }
|
||||
* }
|
||||
* then if the field mask is:
|
||||
* paths: "f.b"
|
||||
* then the result will be:
|
||||
* f {
|
||||
* b {
|
||||
* d : 10
|
||||
* }
|
||||
* c : 1
|
||||
* }
|
||||
* However, if the update mask was:
|
||||
* paths: "f.b.d"
|
||||
* then the result would be:
|
||||
* f {
|
||||
* b {
|
||||
* d : 10
|
||||
* x : 2
|
||||
* }
|
||||
* c : 1
|
||||
* }
|
||||
* In order to reset a field's value to the default, the field must
|
||||
* be in the mask and set to the default value in the provided resource.
|
||||
* Hence, in order to reset all fields of a resource, provide a default
|
||||
* instance of the resource and set all fields in the mask, or do
|
||||
* not provide a mask as described below.
|
||||
* If a field mask is not present on update, the operation applies to
|
||||
* all fields (as if a field mask of all fields has been specified).
|
||||
* Note that in the presence of schema evolution, this may mean that
|
||||
* fields the client does not know and has therefore not filled into
|
||||
* the request will be reset to their default. If this is unwanted
|
||||
* behavior, a specific service may require a client to always specify
|
||||
* a field mask, producing an error if not.
|
||||
* As with get operations, the location of the resource which
|
||||
* describes the updated values in the request message depends on the
|
||||
* operation kind. In any case, the effect of the field mask is
|
||||
* required to be honored by the API.
|
||||
* ## Considerations for HTTP REST
|
||||
* The HTTP kind of an update operation which uses a field mask must
|
||||
* be set to PATCH instead of PUT in order to satisfy HTTP semantics
|
||||
* (PUT must only be used for full updates).
|
||||
* # JSON Encoding of Field Masks
|
||||
* In JSON, a field mask is encoded as a single string where paths are
|
||||
* separated by a comma. Fields name in each path are converted
|
||||
* to/from lower-camel naming conventions.
|
||||
* As an example, consider the following message declarations:
|
||||
* message Profile {
|
||||
* User user = 1;
|
||||
* Photo photo = 2;
|
||||
* }
|
||||
* message User {
|
||||
* string display_name = 1;
|
||||
* string address = 2;
|
||||
* }
|
||||
* In proto a field mask for `Profile` may look as such:
|
||||
* mask {
|
||||
* paths: "user.display_name"
|
||||
* paths: "photo"
|
||||
* }
|
||||
* In JSON, the same mask is represented as below:
|
||||
* {
|
||||
* mask: "user.displayName,photo"
|
||||
* }
|
||||
* # Field Masks and Oneof Fields
|
||||
* Field masks treat fields in oneofs just as regular fields. Consider the
|
||||
* following message:
|
||||
* message SampleMessage {
|
||||
* oneof test_oneof {
|
||||
* string name = 4;
|
||||
* SubMessage sub_message = 9;
|
||||
* }
|
||||
* }
|
||||
* The field mask can be:
|
||||
* mask {
|
||||
* paths: "name"
|
||||
* }
|
||||
* Or:
|
||||
* mask {
|
||||
* paths: "sub_message"
|
||||
* }
|
||||
* Note that oneof type names ("test_oneof" in this case) cannot be used in
|
||||
* paths.
|
||||
* </pre>
|
||||
*
|
||||
* Protobuf type {@code google.protobuf.FieldMask}
|
||||
*/
|
||||
public static final class Builder extends
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
|
||||
// @@protoc_insertion_point(builder_implements:google.protobuf.FieldMask)
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMaskOrBuilder {
|
||||
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMaskProto.internal_static_google_protobuf_FieldMask_descriptor;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMaskProto.internal_static_google_protobuf_FieldMask_fieldAccessorTable
|
||||
.ensureFieldAccessorsInitialized(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask.Builder.class);
|
||||
}
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask.newBuilder()
|
||||
private Builder() {
|
||||
maybeForceBuilderInitialization();
|
||||
}
|
||||
|
||||
private Builder(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
|
||||
super(parent);
|
||||
maybeForceBuilderInitialization();
|
||||
}
|
||||
private void maybeForceBuilderInitialization() {
|
||||
if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.alwaysUseFieldBuilders) {
|
||||
}
|
||||
}
|
||||
public Builder clear() {
|
||||
super.clear();
|
||||
paths_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
|
||||
bitField0_ = (bitField0_ & ~0x00000001);
|
||||
return this;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMaskProto.internal_static_google_protobuf_FieldMask_descriptor;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask.getDefaultInstance();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask build() {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask result = buildPartial();
|
||||
if (!result.isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask buildPartial() {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask(this);
|
||||
int from_bitField0_ = bitField0_;
|
||||
if (((bitField0_ & 0x00000001) == 0x00000001)) {
|
||||
paths_ = paths_.getUnmodifiableView();
|
||||
bitField0_ = (bitField0_ & ~0x00000001);
|
||||
}
|
||||
result.paths_ = paths_;
|
||||
onBuilt();
|
||||
return result;
|
||||
}
|
||||
|
||||
public Builder clone() {
|
||||
return (Builder) super.clone();
|
||||
}
|
||||
public Builder setField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
|
||||
Object value) {
|
||||
return (Builder) super.setField(field, value);
|
||||
}
|
||||
public Builder clearField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
|
||||
return (Builder) super.clearField(field);
|
||||
}
|
||||
public Builder clearOneof(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
|
||||
return (Builder) super.clearOneof(oneof);
|
||||
}
|
||||
public Builder setRepeatedField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
|
||||
int index, Object value) {
|
||||
return (Builder) super.setRepeatedField(field, index, value);
|
||||
}
|
||||
public Builder addRepeatedField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
|
||||
Object value) {
|
||||
return (Builder) super.addRepeatedField(field, value);
|
||||
}
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask)other);
|
||||
} else {
|
||||
super.mergeFrom(other);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask other) {
|
||||
if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask.getDefaultInstance()) return this;
|
||||
if (!other.paths_.isEmpty()) {
|
||||
if (paths_.isEmpty()) {
|
||||
paths_ = other.paths_;
|
||||
bitField0_ = (bitField0_ & ~0x00000001);
|
||||
} else {
|
||||
ensurePathsIsMutable();
|
||||
paths_.addAll(other.paths_);
|
||||
}
|
||||
onChanged();
|
||||
}
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
|
||||
public final boolean isInitialized() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public Builder mergeFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parsedMessage = null;
|
||||
try {
|
||||
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
|
||||
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
|
||||
parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask) e.getUnfinishedMessage();
|
||||
throw e.unwrapIOException();
|
||||
} finally {
|
||||
if (parsedMessage != null) {
|
||||
mergeFrom(parsedMessage);
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
private int bitField0_;
|
||||
|
||||
private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList paths_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
|
||||
private void ensurePathsIsMutable() {
|
||||
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
|
||||
paths_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(paths_);
|
||||
bitField0_ |= 0x00000001;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* The set of field mask paths.
|
||||
* </pre>
|
||||
*
|
||||
* <code>repeated string paths = 1;</code>
|
||||
*/
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList
|
||||
getPathsList() {
|
||||
return paths_.getUnmodifiableView();
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* The set of field mask paths.
|
||||
* </pre>
|
||||
*
|
||||
* <code>repeated string paths = 1;</code>
|
||||
*/
|
||||
public int getPathsCount() {
|
||||
return paths_.size();
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* The set of field mask paths.
|
||||
* </pre>
|
||||
*
|
||||
* <code>repeated string paths = 1;</code>
|
||||
*/
|
||||
public java.lang.String getPaths(int index) {
|
||||
return paths_.get(index);
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* The set of field mask paths.
|
||||
* </pre>
|
||||
*
|
||||
* <code>repeated string paths = 1;</code>
|
||||
*/
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
|
||||
getPathsBytes(int index) {
|
||||
return paths_.getByteString(index);
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* The set of field mask paths.
|
||||
* </pre>
|
||||
*
|
||||
* <code>repeated string paths = 1;</code>
|
||||
*/
|
||||
public Builder setPaths(
|
||||
int index, java.lang.String value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
ensurePathsIsMutable();
|
||||
paths_.set(index, value);
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* The set of field mask paths.
|
||||
* </pre>
|
||||
*
|
||||
* <code>repeated string paths = 1;</code>
|
||||
*/
|
||||
public Builder addPaths(
|
||||
java.lang.String value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
ensurePathsIsMutable();
|
||||
paths_.add(value);
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* The set of field mask paths.
|
||||
* </pre>
|
||||
*
|
||||
* <code>repeated string paths = 1;</code>
|
||||
*/
|
||||
public Builder addAllPaths(
|
||||
java.lang.Iterable<java.lang.String> values) {
|
||||
ensurePathsIsMutable();
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll(
|
||||
values, paths_);
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* The set of field mask paths.
|
||||
* </pre>
|
||||
*
|
||||
* <code>repeated string paths = 1;</code>
|
||||
*/
|
||||
public Builder clearPaths() {
|
||||
paths_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY;
|
||||
bitField0_ = (bitField0_ & ~0x00000001);
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* The set of field mask paths.
|
||||
* </pre>
|
||||
*
|
||||
* <code>repeated string paths = 1;</code>
|
||||
*/
|
||||
public Builder addPathsBytes(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
|
||||
if (value == null) {
|
||||
throw new NullPointerException();
|
||||
}
|
||||
checkByteStringIsUtf8(value);
|
||||
ensurePathsIsMutable();
|
||||
paths_.add(value);
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
public final Builder setUnknownFields(
|
||||
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
|
||||
return this;
|
||||
}
|
||||
|
||||
public final Builder mergeUnknownFields(
|
||||
final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:google.protobuf.FieldMask)
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:google.protobuf.FieldMask)
|
||||
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask DEFAULT_INSTANCE;
|
||||
static {
|
||||
DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask();
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask getDefaultInstance() {
|
||||
return DEFAULT_INSTANCE;
|
||||
}
|
||||
|
||||
private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FieldMask>
|
||||
PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FieldMask>() {
|
||||
public FieldMask parsePartialFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
|
||||
return new FieldMask(input, extensionRegistry);
|
||||
}
|
||||
};
|
||||
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FieldMask> parser() {
|
||||
return PARSER;
|
||||
}
|
||||
|
||||
@java.lang.Override
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FieldMask> getParserForType() {
|
||||
return PARSER;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask getDefaultInstanceForType() {
|
||||
return DEFAULT_INSTANCE;
|
||||
}
|
||||
|
||||
}
@@ -1,44 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/field_mask.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface FieldMaskOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.FieldMask)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * The set of field mask paths.
   * </pre>
   *
   * <code>repeated string paths = 1;</code>
   */
  java.util.List<java.lang.String>
      getPathsList();
  /**
   * <pre>
   * The set of field mask paths.
   * </pre>
   *
   * <code>repeated string paths = 1;</code>
   */
  int getPathsCount();
  /**
   * <pre>
   * The set of field mask paths.
   * </pre>
   *
   * <code>repeated string paths = 1;</code>
   */
  java.lang.String getPaths(int index);
  /**
   * <pre>
   * The set of field mask paths.
   * </pre>
   *
   * <code>repeated string paths = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getPathsBytes(int index);
}
@@ -1,59 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/field_mask.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public final class FieldMaskProto {
  private FieldMaskProto() {}
  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry);
  }
  static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
    internal_static_google_protobuf_FieldMask_descriptor;
  static final
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_protobuf_FieldMask_fieldAccessorTable;

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n google/protobuf/field_mask.proto\022\017goog" +
      "le.protobuf\"\032\n\tFieldMask\022\r\n\005paths\030\001 \003(\tB" +
      "\211\001\n\023com.google.protobufB\016FieldMaskProtoP" +
      "\001Z9google.golang.org/genproto/protobuf/f" +
      "ield_mask;field_mask\242\002\003GPB\252\002\036Google.Prot" +
      "obuf.WellKnownTypesb\006proto3"
    };
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
          public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors(
              org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            return null;
          }
        };
    org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
    internal_static_google_protobuf_FieldMask_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_google_protobuf_FieldMask_fieldAccessorTable = new
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_google_protobuf_FieldMask_descriptor,
        new java.lang.String[] { "Paths", });
  }

  // @@protoc_insertion_point(outer_class_scope)
}
@@ -1,189 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/type.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface FieldOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.Field)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * The field type.
   * </pre>
   *
   * <code>.google.protobuf.Field.Kind kind = 1;</code>
   */
  int getKindValue();
  /**
   * <pre>
   * The field type.
   * </pre>
   *
   * <code>.google.protobuf.Field.Kind kind = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.Field.Kind getKind();

  /**
   * <pre>
   * The field cardinality.
   * </pre>
   *
   * <code>.google.protobuf.Field.Cardinality cardinality = 2;</code>
   */
  int getCardinalityValue();
  /**
   * <pre>
   * The field cardinality.
   * </pre>
   *
   * <code>.google.protobuf.Field.Cardinality cardinality = 2;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.Field.Cardinality getCardinality();

  /**
   * <pre>
   * The field number.
   * </pre>
   *
   * <code>int32 number = 3;</code>
   */
  int getNumber();

  /**
   * <pre>
   * The field name.
   * </pre>
   *
   * <code>string name = 4;</code>
   */
  java.lang.String getName();
  /**
   * <pre>
   * The field name.
   * </pre>
   *
   * <code>string name = 4;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getNameBytes();

  /**
   * <pre>
   * The field type URL, without the scheme, for message or enumeration
   * types. Example: `"type.googleapis.org.apache.hadoop.hbase.shaded.com.google.protobuf.Timestamp"`.
   * </pre>
   *
   * <code>string type_url = 6;</code>
   */
  java.lang.String getTypeUrl();
  /**
   * <pre>
   * The field type URL, without the scheme, for message or enumeration
   * types. Example: `"type.googleapis.org.apache.hadoop.hbase.shaded.com.google.protobuf.Timestamp"`.
   * </pre>
   *
   * <code>string type_url = 6;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getTypeUrlBytes();

  /**
   * <pre>
   * The index of the field type in `Type.oneofs`, for message or enumeration
   * types. The first type has index 1; zero means the type is not in the list.
   * </pre>
   *
   * <code>int32 oneof_index = 7;</code>
   */
  int getOneofIndex();

  /**
   * <pre>
   * Whether to use alternative packed wire representation.
   * </pre>
   *
   * <code>bool packed = 8;</code>
   */
  boolean getPacked();

  /**
   * <pre>
   * The protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 9;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Option>
      getOptionsList();
  /**
   * <pre>
   * The protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 9;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.Option getOptions(int index);
  /**
   * <pre>
   * The protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 9;</code>
   */
  int getOptionsCount();
  /**
   * <pre>
   * The protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 9;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder>
      getOptionsOrBuilderList();
  /**
   * <pre>
   * The protocol buffer options.
   * </pre>
   *
   * <code>repeated .google.protobuf.Option options = 9;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder getOptionsOrBuilder(
      int index);

  /**
   * <pre>
   * The field JSON name.
   * </pre>
   *
   * <code>string json_name = 10;</code>
   */
  java.lang.String getJsonName();
  /**
   * <pre>
   * The field JSON name.
   * </pre>
   *
   * <code>string json_name = 10;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getJsonNameBytes();

  /**
   * <pre>
   * The string value of the default value of this field. Proto2 syntax only.
   * </pre>
   *
   * <code>string default_value = 11;</code>
   */
  java.lang.String getDefaultValue();
  /**
   * <pre>
   * The string value of the default value of this field. Proto2 syntax only.
   * </pre>
   *
   * <code>string default_value = 11;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
      getDefaultValueBytes();
}
@@ -1,909 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyField.LazyIterator;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * A class which represents an arbitrary set of fields of some message type.
 * This is used to implement {@link DynamicMessage}, and also to represent
 * extensions in {@link GeneratedMessage}. This class is package-private,
 * since outside users should probably be using {@link DynamicMessage}.
 *
 * @author kenton@google.com Kenton Varda
 */
final class FieldSet<FieldDescriptorType extends
      FieldSet.FieldDescriptorLite<FieldDescriptorType>> {
  /**
   * Interface for a FieldDescriptor or lite extension descriptor. This
   * prevents FieldSet from depending on {@link Descriptors.FieldDescriptor}.
   */
  public interface FieldDescriptorLite<T extends FieldDescriptorLite<T>>
      extends Comparable<T> {
    int getNumber();
    WireFormat.FieldType getLiteType();
    WireFormat.JavaType getLiteJavaType();
    boolean isRepeated();
    boolean isPacked();
    Internal.EnumLiteMap<?> getEnumType();

    // If getLiteJavaType() == MESSAGE, this merges a message object of the
    // type into a builder of the type. Returns {@code to}.
    MessageLite.Builder internalMergeFrom(
        MessageLite.Builder to, MessageLite from);
  }

  private final SmallSortedMap<FieldDescriptorType, Object> fields;
  private boolean isImmutable;
  private boolean hasLazyField = false;

  /** Construct a new FieldSet. */
  private FieldSet() {
    this.fields = SmallSortedMap.newFieldMap(16);
  }

  /**
   * Construct an empty FieldSet. This is only used to initialize
   * DEFAULT_INSTANCE.
   */
  private FieldSet(final boolean dummy) {
    this.fields = SmallSortedMap.newFieldMap(0);
    makeImmutable();
  }

  /** Construct a new FieldSet. */
  public static <T extends FieldSet.FieldDescriptorLite<T>>
      FieldSet<T> newFieldSet() {
    return new FieldSet<T>();
  }

  /** Get an immutable empty FieldSet. */
  @SuppressWarnings("unchecked")
  public static <T extends FieldSet.FieldDescriptorLite<T>>
      FieldSet<T> emptySet() {
    return DEFAULT_INSTANCE;
  }
  @SuppressWarnings("rawtypes")
  private static final FieldSet DEFAULT_INSTANCE = new FieldSet(true);

  /** Make this FieldSet immutable from this point forward. */
  @SuppressWarnings("unchecked")
  public void makeImmutable() {
    if (isImmutable) {
      return;
    }
    fields.makeImmutable();
    isImmutable = true;
  }

  /**
   * Returns whether the FieldSet is immutable. This is true if it is the
   * {@link #emptySet} or if {@link #makeImmutable} were called.
   *
   * @return whether the FieldSet is immutable.
   */
  public boolean isImmutable() {
    return isImmutable;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }

    if (!(o instanceof FieldSet)) {
      return false;
    }

    FieldSet<?> other = (FieldSet<?>) o;
    return fields.equals(other.fields);
  }

  @Override
  public int hashCode() {
    return fields.hashCode();
  }

  /**
   * Clones the FieldSet. The returned FieldSet will be mutable even if the
   * original FieldSet was immutable.
   *
   * @return the newly cloned FieldSet
   */
  @Override
  public FieldSet<FieldDescriptorType> clone() {
    // We can't just call fields.clone because List objects in the map
    // should not be shared.
    FieldSet<FieldDescriptorType> clone = FieldSet.newFieldSet();
    for (int i = 0; i < fields.getNumArrayEntries(); i++) {
      Map.Entry<FieldDescriptorType, Object> entry = fields.getArrayEntryAt(i);
      FieldDescriptorType descriptor = entry.getKey();
      clone.setField(descriptor, entry.getValue());
    }
    for (Map.Entry<FieldDescriptorType, Object> entry :
             fields.getOverflowEntries()) {
      FieldDescriptorType descriptor = entry.getKey();
      clone.setField(descriptor, entry.getValue());
    }
    clone.hasLazyField = hasLazyField;
    return clone;
  }


  // =================================================================

  /** See {@link Message.Builder#clear()}. */
  public void clear() {
    fields.clear();
    hasLazyField = false;
  }

  /**
   * Get a simple map containing all the fields.
   */
  public Map<FieldDescriptorType, Object> getAllFields() {
    if (hasLazyField) {
      SmallSortedMap<FieldDescriptorType, Object> result =
          SmallSortedMap.newFieldMap(16);
      for (int i = 0; i < fields.getNumArrayEntries(); i++) {
        cloneFieldEntry(result, fields.getArrayEntryAt(i));
      }
      for (Map.Entry<FieldDescriptorType, Object> entry :
          fields.getOverflowEntries()) {
        cloneFieldEntry(result, entry);
      }
      if (fields.isImmutable()) {
        result.makeImmutable();
      }
      return result;
    }
    return fields.isImmutable() ? fields : Collections.unmodifiableMap(fields);
  }

  private void cloneFieldEntry(Map<FieldDescriptorType, Object> map,
      Map.Entry<FieldDescriptorType, Object> entry) {
    FieldDescriptorType key = entry.getKey();
    Object value = entry.getValue();
    if (value instanceof LazyField) {
      map.put(key, ((LazyField) value).getValue());
    } else {
      map.put(key, value);
    }
  }

  /**
   * Get an iterator to the field map. This iterator should not be leaked out
   * of the protobuf library as it is not protected from mutation when fields
   * is not immutable.
   */
  public Iterator<Map.Entry<FieldDescriptorType, Object>> iterator() {
    if (hasLazyField) {
      return new LazyIterator<FieldDescriptorType>(
          fields.entrySet().iterator());
    }
    return fields.entrySet().iterator();
  }


  /**
   * Useful for implementing
   * {@link Message#hasField(Descriptors.FieldDescriptor)}.
   */
  public boolean hasField(final FieldDescriptorType descriptor) {
    if (descriptor.isRepeated()) {
      throw new IllegalArgumentException(
          "hasField() can only be called on non-repeated fields.");
    }

    return fields.get(descriptor) != null;
  }

  /**
   * Useful for implementing
   * {@link Message#getField(Descriptors.FieldDescriptor)}. This method
   * returns {@code null} if the field is not set; in this case it is up
   * to the caller to fetch the field's default value.
   */
  public Object getField(final FieldDescriptorType descriptor) {
    Object o = fields.get(descriptor);
    if (o instanceof LazyField) {
      return ((LazyField) o).getValue();
    }
    return o;
  }

  /**
   * Useful for implementing
   * {@link Message.Builder#setField(Descriptors.FieldDescriptor,Object)}.
   */
  @SuppressWarnings({"unchecked", "rawtypes"})
  public void setField(final FieldDescriptorType descriptor,
                       Object value) {
    if (descriptor.isRepeated()) {
      if (!(value instanceof List)) {
        throw new IllegalArgumentException(
            "Wrong object type used with protocol message reflection.");
      }

      // Wrap the contents in a new list so that the caller cannot change
      // the list's contents after setting it.
      final List newList = new ArrayList();
      newList.addAll((List) value);
      for (final Object element : newList) {
        verifyType(descriptor.getLiteType(), element);
      }
      value = newList;
    } else {
      verifyType(descriptor.getLiteType(), value);
    }

    if (value instanceof LazyField) {
      hasLazyField = true;
    }
    fields.put(descriptor, value);
  }

  /**
   * Useful for implementing
   * {@link Message.Builder#clearField(Descriptors.FieldDescriptor)}.
   */
  public void clearField(final FieldDescriptorType descriptor) {
    fields.remove(descriptor);
    if (fields.isEmpty()) {
      hasLazyField = false;
    }
  }

  /**
   * Useful for implementing
   * {@link Message#getRepeatedFieldCount(Descriptors.FieldDescriptor)}.
   */
  public int getRepeatedFieldCount(final FieldDescriptorType descriptor) {
    if (!descriptor.isRepeated()) {
      throw new IllegalArgumentException(
          "getRepeatedField() can only be called on repeated fields.");
    }

    final Object value = getField(descriptor);
    if (value == null) {
      return 0;
    } else {
      return ((List<?>) value).size();
    }
  }

  /**
   * Useful for implementing
   * {@link Message#getRepeatedField(Descriptors.FieldDescriptor,int)}.
   */
  public Object getRepeatedField(final FieldDescriptorType descriptor,
                                 final int index) {
    if (!descriptor.isRepeated()) {
      throw new IllegalArgumentException(
          "getRepeatedField() can only be called on repeated fields.");
    }

    final Object value = getField(descriptor);

    if (value == null) {
      throw new IndexOutOfBoundsException();
    } else {
      return ((List<?>) value).get(index);
    }
  }

  /**
   * Useful for implementing
   * {@link Message.Builder#setRepeatedField(Descriptors.FieldDescriptor,int,Object)}.
   */
  @SuppressWarnings("unchecked")
  public void setRepeatedField(final FieldDescriptorType descriptor,
                               final int index,
                               final Object value) {
    if (!descriptor.isRepeated()) {
      throw new IllegalArgumentException(
          "getRepeatedField() can only be called on repeated fields.");
    }

    final Object list = getField(descriptor);
    if (list == null) {
      throw new IndexOutOfBoundsException();
    }

    verifyType(descriptor.getLiteType(), value);
    ((List<Object>) list).set(index, value);
  }

  /**
   * Useful for implementing
   * {@link Message.Builder#addRepeatedField(Descriptors.FieldDescriptor,Object)}.
   */
  @SuppressWarnings("unchecked")
  public void addRepeatedField(final FieldDescriptorType descriptor,
                               final Object value) {
    if (!descriptor.isRepeated()) {
      throw new IllegalArgumentException(
          "addRepeatedField() can only be called on repeated fields.");
    }

    verifyType(descriptor.getLiteType(), value);

    final Object existingValue = getField(descriptor);
    List<Object> list;
    if (existingValue == null) {
      list = new ArrayList<Object>();
      fields.put(descriptor, list);
    } else {
      list = (List<Object>) existingValue;
    }

    list.add(value);
  }

  /**
   * Verifies that the given object is of the correct type to be a valid
   * value for the given field. (For repeated fields, this checks if the
   * object is the right type to be one element of the field.)
   *
   * @throws IllegalArgumentException The value is not of the right type.
   */
  private static void verifyType(final WireFormat.FieldType type,
                                 final Object value) {
    if (value == null) {
      throw new NullPointerException();
    }

    boolean isValid = false;
    switch (type.getJavaType()) {
      case INT: isValid = value instanceof Integer ; break;
      case LONG: isValid = value instanceof Long ; break;
      case FLOAT: isValid = value instanceof Float ; break;
      case DOUBLE: isValid = value instanceof Double ; break;
      case BOOLEAN: isValid = value instanceof Boolean ; break;
      case STRING: isValid = value instanceof String ; break;
      case BYTE_STRING:
        isValid = value instanceof ByteString || value instanceof byte[];
        break;
      case ENUM:
        // TODO(kenton): Caller must do type checking here, I guess.
        isValid =
            (value instanceof Integer || value instanceof Internal.EnumLite);
        break;
      case MESSAGE:
        // TODO(kenton): Caller must do type checking here, I guess.
        isValid =
            (value instanceof MessageLite) || (value instanceof LazyField);
        break;
    }

    if (!isValid) {
      // TODO(kenton): When chaining calls to setField(), it can be hard to
      // tell from the stack trace which exact call failed, since the whole
      // chain is considered one line of code. It would be nice to print
      // more information here, e.g. naming the field. We used to do that.
      // But we can't now that FieldSet doesn't use descriptors. Maybe this
      // isn't a big deal, though, since it would only really apply when using
      // reflection and generally people don't chain reflection setters.
      throw new IllegalArgumentException(
          "Wrong object type used with protocol message reflection.");
    }
  }

  // =================================================================
  // Parsing and serialization

  /**
   * See {@link Message#isInitialized()}. Note: Since {@code FieldSet}
   * itself does not have any way of knowing about required fields that
   * aren't actually present in the set, it is up to the caller to check
   * that all required fields are present.
   */
  public boolean isInitialized() {
    for (int i = 0; i < fields.getNumArrayEntries(); i++) {
      if (!isInitialized(fields.getArrayEntryAt(i))) {
        return false;
      }
    }
    for (final Map.Entry<FieldDescriptorType, Object> entry :
             fields.getOverflowEntries()) {
      if (!isInitialized(entry)) {
        return false;
      }
    }
    return true;
  }

  @SuppressWarnings("unchecked")
  private boolean isInitialized(
      final Map.Entry<FieldDescriptorType, Object> entry) {
    final FieldDescriptorType descriptor = entry.getKey();
    if (descriptor.getLiteJavaType() == WireFormat.JavaType.MESSAGE) {
      if (descriptor.isRepeated()) {
        for (final MessageLite element:
                 (List<MessageLite>) entry.getValue()) {
          if (!element.isInitialized()) {
            return false;
          }
        }
      } else {
        Object value = entry.getValue();
        if (value instanceof MessageLite) {
          if (!((MessageLite) value).isInitialized()) {
            return false;
          }
        } else if (value instanceof LazyField) {
          return true;
        } else {
          throw new IllegalArgumentException(
              "Wrong object type used with protocol message reflection.");
        }
      }
    }
    return true;
  }

  /**
   * Given a field type, return the wire type.
   *
   * @returns One of the {@code WIRETYPE_} constants defined in
   *          {@link WireFormat}.
   */
  static int getWireFormatForFieldType(final WireFormat.FieldType type,
                                       boolean isPacked) {
    if (isPacked) {
      return WireFormat.WIRETYPE_LENGTH_DELIMITED;
    } else {
      return type.getWireType();
    }
  }

  /**
   * Like {@link Message.Builder#mergeFrom(Message)}, but merges from another
   * {@link FieldSet}.
   */
  public void mergeFrom(final FieldSet<FieldDescriptorType> other) {
    for (int i = 0; i < other.fields.getNumArrayEntries(); i++) {
      mergeFromField(other.fields.getArrayEntryAt(i));
    }
    for (final Map.Entry<FieldDescriptorType, Object> entry :
             other.fields.getOverflowEntries()) {
      mergeFromField(entry);
    }
  }

  private Object cloneIfMutable(Object value) {
    if (value instanceof byte[]) {
      byte[] bytes = (byte[]) value;
      byte[] copy = new byte[bytes.length];
      System.arraycopy(bytes, 0, copy, 0, bytes.length);
      return copy;
    } else {
      return value;
    }
  }

  @SuppressWarnings({"unchecked", "rawtypes"})
  private void mergeFromField(
      final Map.Entry<FieldDescriptorType, Object> entry) {
    final FieldDescriptorType descriptor = entry.getKey();
    Object otherValue = entry.getValue();
    if (otherValue instanceof LazyField) {
      otherValue = ((LazyField) otherValue).getValue();
    }

    if (descriptor.isRepeated()) {
      Object value = getField(descriptor);
      if (value == null) {
        value = new ArrayList();
      }
      for (Object element : (List) otherValue) {
        ((List) value).add(cloneIfMutable(element));
      }
      fields.put(descriptor, value);
    } else if (descriptor.getLiteJavaType() == WireFormat.JavaType.MESSAGE) {
      Object value = getField(descriptor);
      if (value == null) {
        fields.put(descriptor, cloneIfMutable(otherValue));
      } else {
        // Merge the messages.
        value = descriptor.internalMergeFrom(
            ((MessageLite) value).toBuilder(), (MessageLite) otherValue)
            .build();

        fields.put(descriptor, value);
      }
    } else {
      fields.put(descriptor, cloneIfMutable(otherValue));
    }
  }

  // TODO(kenton): Move static parsing and serialization methods into some
  // other class. Probably WireFormat.

  /**
   * Read a field of any primitive type for immutable messages from a
   * CodedInputStream. Enums, groups, and embedded messages are not handled by
   * this method.
   *
   * @param input The stream from which to read.
   * @param type Declared type of the field.
   * @param checkUtf8 When true, check that the input is valid utf8.
   * @return An object representing the field's value, of the exact
   *         type which would be returned by
   *         {@link Message#getField(Descriptors.FieldDescriptor)} for
   *         this field.
   */
  public static Object readPrimitiveField(
      CodedInputStream input,
      final WireFormat.FieldType type,
      boolean checkUtf8) throws IOException {
    if (checkUtf8) {
      return WireFormat.readPrimitiveField(input, type,
          WireFormat.Utf8Validation.STRICT);
    } else {
      return WireFormat.readPrimitiveField(input, type,
          WireFormat.Utf8Validation.LOOSE);
    }
  }


  /** See {@link Message#writeTo(CodedOutputStream)}. */
  public void writeTo(final CodedOutputStream output)
                      throws IOException {
    for (int i = 0; i < fields.getNumArrayEntries(); i++) {
      final Map.Entry<FieldDescriptorType, Object> entry =
          fields.getArrayEntryAt(i);
      writeField(entry.getKey(), entry.getValue(), output);
    }
    for (final Map.Entry<FieldDescriptorType, Object> entry :
         fields.getOverflowEntries()) {
      writeField(entry.getKey(), entry.getValue(), output);
    }
  }

  /**
   * Like {@link #writeTo} but uses MessageSet wire format.
   */
  public void writeMessageSetTo(final CodedOutputStream output)
                                throws IOException {
    for (int i = 0; i < fields.getNumArrayEntries(); i++) {
      writeMessageSetTo(fields.getArrayEntryAt(i), output);
    }
    for (final Map.Entry<FieldDescriptorType, Object> entry :
             fields.getOverflowEntries()) {
      writeMessageSetTo(entry, output);
    }
  }

  private void writeMessageSetTo(
      final Map.Entry<FieldDescriptorType, Object> entry,
      final CodedOutputStream output) throws IOException {
    final FieldDescriptorType descriptor = entry.getKey();
    if (descriptor.getLiteJavaType() == WireFormat.JavaType.MESSAGE &&
        !descriptor.isRepeated() && !descriptor.isPacked()) {
      Object value = entry.getValue();
      if (value instanceof LazyField) {
        value = ((LazyField) value).getValue();
      }
      output.writeMessageSetExtension(entry.getKey().getNumber(),
                                      (MessageLite) value);
    } else {
      writeField(descriptor, entry.getValue(), output);
    }
  }

  /**
   * Write a single tag-value pair to the stream.
   *
   * @param output The output stream.
   * @param type   The field's type.
   * @param number The field's number.
   * @param value  Object representing the field's value. Must be of the exact
   *               type which would be returned by
   *               {@link Message#getField(Descriptors.FieldDescriptor)} for
   *               this field.
   */
  static void writeElement(
      final CodedOutputStream output,
      final WireFormat.FieldType type,
      final int number,
      final Object value) throws IOException {
    // Special case for groups, which need a start and end tag; other fields
    // can just use writeTag() and writeFieldNoTag().
    if (type == WireFormat.FieldType.GROUP) {
      output.writeGroup(number, (MessageLite) value);
    } else {
      output.writeTag(number, getWireFormatForFieldType(type, false));
      writeElementNoTag(output, type, value);
    }
  }

  /**
   * Write a field of arbitrary type, without its tag, to the stream.
   *
   * @param output The output stream.
   * @param type   The field's type.
   * @param value  Object representing the field's value. Must be of the exact
   *               type which would be returned by
   *               {@link Message#getField(Descriptors.FieldDescriptor)} for
   *               this field.
   */
  static void writeElementNoTag(
      final CodedOutputStream output,
      final WireFormat.FieldType type,
      final Object value) throws IOException {
    switch (type) {
      case DOUBLE  : output.writeDoubleNoTag  ((Double     ) value); break;
      case FLOAT   : output.writeFloatNoTag   ((Float      ) value); break;
      case INT64   : output.writeInt64NoTag   ((Long       ) value); break;
      case UINT64  : output.writeUInt64NoTag  ((Long       ) value); break;
      case INT32   : output.writeInt32NoTag   ((Integer    ) value); break;
      case FIXED64 : output.writeFixed64NoTag ((Long       ) value); break;
      case FIXED32 : output.writeFixed32NoTag ((Integer    ) value); break;
      case BOOL    : output.writeBoolNoTag    ((Boolean    ) value); break;
      case GROUP   : output.writeGroupNoTag   ((MessageLite) value); break;
      case MESSAGE : output.writeMessageNoTag ((MessageLite) value); break;
      case STRING:
        if (value instanceof ByteString) {
          output.writeBytesNoTag((ByteString) value);
        } else {
          output.writeStringNoTag((String) value);
        }
        break;
      case BYTES:
        if (value instanceof ByteString) {
          output.writeBytesNoTag((ByteString) value);
        } else {
          output.writeByteArrayNoTag((byte[]) value);
        }
        break;
      case UINT32  : output.writeUInt32NoTag  ((Integer    ) value); break;
      case SFIXED32: output.writeSFixed32NoTag((Integer    ) value); break;
      case SFIXED64: output.writeSFixed64NoTag((Long       ) value); break;
      case SINT32  : output.writeSInt32NoTag  ((Integer    ) value); break;
      case SINT64  : output.writeSInt64NoTag  ((Long       ) value); break;

      case ENUM:
if (value instanceof Internal.EnumLite) {
|
||||
output.writeEnumNoTag(((Internal.EnumLite) value).getNumber());
|
||||
} else {
|
||||
output.writeEnumNoTag(((Integer) value).intValue());
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
/** Write a single field. */
|
||||
public static void writeField(final FieldDescriptorLite<?> descriptor,
|
||||
final Object value,
|
||||
final CodedOutputStream output)
|
||||
throws IOException {
|
||||
WireFormat.FieldType type = descriptor.getLiteType();
|
||||
int number = descriptor.getNumber();
|
||||
if (descriptor.isRepeated()) {
|
||||
final List<?> valueList = (List<?>)value;
|
||||
if (descriptor.isPacked()) {
|
||||
output.writeTag(number, WireFormat.WIRETYPE_LENGTH_DELIMITED);
|
||||
// Compute the total data size so the length can be written.
|
||||
int dataSize = 0;
|
||||
for (final Object element : valueList) {
|
||||
dataSize += computeElementSizeNoTag(type, element);
|
||||
}
|
||||
output.writeRawVarint32(dataSize);
|
||||
// Write the data itself, without any tags.
|
||||
for (final Object element : valueList) {
|
||||
writeElementNoTag(output, type, element);
|
||||
}
|
||||
} else {
|
||||
for (final Object element : valueList) {
|
||||
writeElement(output, type, number, element);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (value instanceof LazyField) {
|
||||
writeElement(output, type, number, ((LazyField) value).getValue());
|
||||
} else {
|
||||
writeElement(output, type, number, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* See {@link Message#getSerializedSize()}. It's up to the caller to cache
|
||||
* the resulting size if desired.
|
||||
*/
|
||||
public int getSerializedSize() {
|
||||
int size = 0;
|
||||
for (int i = 0; i < fields.getNumArrayEntries(); i++) {
|
||||
final Map.Entry<FieldDescriptorType, Object> entry =
|
||||
fields.getArrayEntryAt(i);
|
||||
size += computeFieldSize(entry.getKey(), entry.getValue());
|
||||
}
|
||||
for (final Map.Entry<FieldDescriptorType, Object> entry :
|
||||
fields.getOverflowEntries()) {
|
||||
size += computeFieldSize(entry.getKey(), entry.getValue());
|
||||
}
|
||||
return size;
|
||||
}
|
||||
|
||||
/**
|
||||
* Like {@link #getSerializedSize} but uses MessageSet wire format.
|
||||
*/
|
||||
public int getMessageSetSerializedSize() {
|
||||
int size = 0;
|
||||
for (int i = 0; i < fields.getNumArrayEntries(); i++) {
|
||||
size += getMessageSetSerializedSize(fields.getArrayEntryAt(i));
|
||||
}
|
||||
for (final Map.Entry<FieldDescriptorType, Object> entry :
|
||||
fields.getOverflowEntries()) {
|
||||
size += getMessageSetSerializedSize(entry);
|
||||
}
|
||||
return size;
|
||||
}
|
||||
|
||||
private int getMessageSetSerializedSize(
|
||||
final Map.Entry<FieldDescriptorType, Object> entry) {
|
||||
final FieldDescriptorType descriptor = entry.getKey();
|
||||
Object value = entry.getValue();
|
||||
if (descriptor.getLiteJavaType() == WireFormat.JavaType.MESSAGE
|
||||
&& !descriptor.isRepeated() && !descriptor.isPacked()) {
|
||||
if (value instanceof LazyField) {
|
||||
return CodedOutputStream.computeLazyFieldMessageSetExtensionSize(
|
||||
entry.getKey().getNumber(), (LazyField) value);
|
||||
} else {
|
||||
return CodedOutputStream.computeMessageSetExtensionSize(
|
||||
entry.getKey().getNumber(), (MessageLite) value);
|
||||
}
|
||||
} else {
|
||||
return computeFieldSize(descriptor, value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the number of bytes that would be needed to encode a
|
||||
* single tag/value pair of arbitrary type.
|
||||
*
|
||||
* @param type The field's type.
|
||||
* @param number The field's number.
|
||||
* @param value Object representing the field's value. Must be of the exact
|
||||
* type which would be returned by
|
||||
* {@link Message#getField(Descriptors.FieldDescriptor)} for
|
||||
* this field.
|
||||
*/
|
||||
static int computeElementSize(
|
||||
final WireFormat.FieldType type, final int number, final Object value) {
|
||||
int tagSize = CodedOutputStream.computeTagSize(number);
|
||||
if (type == WireFormat.FieldType.GROUP) {
|
||||
// Only count the end group tag for proto2 messages as for proto1 the end
|
||||
// group tag will be counted as a part of getSerializedSize().
|
||||
tagSize *= 2;
|
||||
}
|
||||
return tagSize + computeElementSizeNoTag(type, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the number of bytes that would be needed to encode a
|
||||
* particular value of arbitrary type, excluding tag.
|
||||
*
|
||||
* @param type The field's type.
|
||||
* @param value Object representing the field's value. Must be of the exact
|
||||
* type which would be returned by
|
||||
* {@link Message#getField(Descriptors.FieldDescriptor)} for
|
||||
* this field.
|
||||
*/
|
||||
static int computeElementSizeNoTag(
|
||||
final WireFormat.FieldType type, final Object value) {
|
||||
switch (type) {
|
||||
// Note: Minor violation of 80-char limit rule here because this would
|
||||
// actually be harder to read if we wrapped the lines.
|
||||
case DOUBLE : return CodedOutputStream.computeDoubleSizeNoTag ((Double )value);
|
||||
case FLOAT : return CodedOutputStream.computeFloatSizeNoTag ((Float )value);
|
||||
case INT64 : return CodedOutputStream.computeInt64SizeNoTag ((Long )value);
|
||||
case UINT64 : return CodedOutputStream.computeUInt64SizeNoTag ((Long )value);
|
||||
case INT32 : return CodedOutputStream.computeInt32SizeNoTag ((Integer )value);
|
||||
case FIXED64 : return CodedOutputStream.computeFixed64SizeNoTag ((Long )value);
|
||||
case FIXED32 : return CodedOutputStream.computeFixed32SizeNoTag ((Integer )value);
|
||||
case BOOL : return CodedOutputStream.computeBoolSizeNoTag ((Boolean )value);
|
||||
case GROUP : return CodedOutputStream.computeGroupSizeNoTag ((MessageLite)value);
|
||||
case BYTES :
|
||||
if (value instanceof ByteString) {
|
||||
return CodedOutputStream.computeBytesSizeNoTag((ByteString) value);
|
||||
} else {
|
||||
return CodedOutputStream.computeByteArraySizeNoTag((byte[]) value);
|
||||
}
|
||||
case STRING :
|
||||
if (value instanceof ByteString) {
|
||||
return CodedOutputStream.computeBytesSizeNoTag((ByteString) value);
|
||||
} else {
|
||||
return CodedOutputStream.computeStringSizeNoTag((String) value);
|
||||
}
|
||||
case UINT32 : return CodedOutputStream.computeUInt32SizeNoTag ((Integer )value);
|
||||
case SFIXED32: return CodedOutputStream.computeSFixed32SizeNoTag((Integer )value);
|
||||
case SFIXED64: return CodedOutputStream.computeSFixed64SizeNoTag((Long )value);
|
||||
case SINT32 : return CodedOutputStream.computeSInt32SizeNoTag ((Integer )value);
|
||||
case SINT64 : return CodedOutputStream.computeSInt64SizeNoTag ((Long )value);
|
||||
|
||||
case MESSAGE:
|
||||
if (value instanceof LazyField) {
|
||||
return CodedOutputStream.computeLazyFieldSizeNoTag((LazyField) value);
|
||||
} else {
|
||||
return CodedOutputStream.computeMessageSizeNoTag((MessageLite) value);
|
||||
}
|
||||
|
||||
case ENUM:
|
||||
if (value instanceof Internal.EnumLite) {
|
||||
return CodedOutputStream.computeEnumSizeNoTag(
|
||||
((Internal.EnumLite) value).getNumber());
|
||||
} else {
|
||||
return CodedOutputStream.computeEnumSizeNoTag((Integer) value);
|
||||
}
|
||||
}
|
||||
|
||||
throw new RuntimeException(
|
||||
"There is no way to get here, but the compiler thinks otherwise.");
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the number of bytes needed to encode a particular field.
|
||||
*/
|
||||
public static int computeFieldSize(final FieldDescriptorLite<?> descriptor,
|
||||
final Object value) {
|
||||
WireFormat.FieldType type = descriptor.getLiteType();
|
||||
int number = descriptor.getNumber();
|
||||
if (descriptor.isRepeated()) {
|
||||
if (descriptor.isPacked()) {
|
||||
int dataSize = 0;
|
||||
for (final Object element : (List<?>)value) {
|
||||
dataSize += computeElementSizeNoTag(type, element);
|
||||
}
|
||||
return dataSize +
|
||||
CodedOutputStream.computeTagSize(number) +
|
||||
CodedOutputStream.computeRawVarint32Size(dataSize);
|
||||
} else {
|
||||
int size = 0;
|
||||
for (final Object element : (List<?>)value) {
|
||||
size += computeElementSize(type, number, element);
|
||||
}
|
||||
return size;
|
||||
}
|
||||
} else {
|
||||
return computeElementSize(type, number, value);
|
||||
}
|
||||
}
|
||||
}
|
|
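For reference, the packed branch of writeField()/computeFieldSize() above is the whole of the packed-repeated wire format: one tag, a varint byte count, then the elements back to back with no per-element tags. A minimal standalone sketch of the same two-pass logic against the stock (unshaded) com.google.protobuf runtime, assuming a protobuf-java 3.x API where writeUInt32NoTag is public (the deleted code above uses the older writeRawVarint32 spelling):

import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.WireFormat;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class PackedFieldSketch {
  // Encode a repeated int32 field in packed form, mirroring the two passes above:
  // first size the payload, then write tag + length + untagged elements.
  static byte[] encodePackedInt32(int fieldNumber, int[] values) throws IOException {
    int dataSize = 0;
    for (int v : values) {
      dataSize += CodedOutputStream.computeInt32SizeNoTag(v);
    }
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    CodedOutputStream output = CodedOutputStream.newInstance(bytes);
    output.writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED);
    output.writeUInt32NoTag(dataSize);   // the varint length prefix
    for (int v : values) {
      output.writeInt32NoTag(v);         // elements carry no tags
    }
    output.flush();
    return bytes.toByteArray();
  }

  public static void main(String[] args) throws IOException {
    // Field 1, values 1..3: one tag byte + one length byte + three payload bytes.
    System.out.println(encodePackedInt32(1, new int[] {1, 2, 3}).length);  // 5
  }
}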
@@ -1,272 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.FloatList;

import java.util.Arrays;
import java.util.Collection;
import java.util.RandomAccess;

/**
 * An implementation of {@link FloatList} on top of a primitive array.
 *
 * @author dweis@google.com (Daniel Weis)
 */
final class FloatArrayList
    extends AbstractProtobufList<Float>
    implements FloatList, RandomAccess {

  private static final FloatArrayList EMPTY_LIST = new FloatArrayList();
  static {
    EMPTY_LIST.makeImmutable();
  }

  public static FloatArrayList emptyList() {
    return EMPTY_LIST;
  }

  /**
   * The backing store for the list.
   */
  private float[] array;

  /**
   * The size of the list distinct from the length of the array. That is, it is the number of
   * elements set in the list.
   */
  private int size;

  /**
   * Constructs a new mutable {@code FloatArrayList} with default capacity.
   */
  FloatArrayList() {
    this(new float[DEFAULT_CAPACITY], 0);
  }

  /**
   * Constructs a new mutable {@code FloatArrayList}
   * containing the same elements as {@code other}.
   */
  private FloatArrayList(float[] other, int size) {
    array = other;
    this.size = size;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof FloatArrayList)) {
      return super.equals(o);
    }
    FloatArrayList other = (FloatArrayList) o;
    if (size != other.size) {
      return false;
    }

    final float[] arr = other.array;
    for (int i = 0; i < size; i++) {
      if (array[i] != arr[i]) {
        return false;
      }
    }

    return true;
  }

  @Override
  public int hashCode() {
    int result = 1;
    for (int i = 0; i < size; i++) {
      result = (31 * result) + Float.floatToIntBits(array[i]);
    }
    return result;
  }

  @Override
  public FloatList mutableCopyWithCapacity(int capacity) {
    if (capacity < size) {
      throw new IllegalArgumentException();
    }
    return new FloatArrayList(Arrays.copyOf(array, capacity), size);
  }

  @Override
  public Float get(int index) {
    return getFloat(index);
  }

  @Override
  public float getFloat(int index) {
    ensureIndexInRange(index);
    return array[index];
  }

  @Override
  public int size() {
    return size;
  }

  @Override
  public Float set(int index, Float element) {
    return setFloat(index, element);
  }

  @Override
  public float setFloat(int index, float element) {
    ensureIsMutable();
    ensureIndexInRange(index);
    float previousValue = array[index];
    array[index] = element;
    return previousValue;
  }

  @Override
  public void add(int index, Float element) {
    addFloat(index, element);
  }

  /**
   * Like {@link #add(Float)} but more efficient in that it doesn't box the element.
   */
  @Override
  public void addFloat(float element) {
    addFloat(size, element);
  }

  /**
   * Like {@link #add(int, Float)} but more efficient in that it doesn't box the element.
   */
  private void addFloat(int index, float element) {
    ensureIsMutable();
    if (index < 0 || index > size) {
      throw new IndexOutOfBoundsException(makeOutOfBoundsExceptionMessage(index));
    }

    if (size < array.length) {
      // Shift everything over to make room
      System.arraycopy(array, index, array, index + 1, size - index);
    } else {
      // Resize to 1.5x the size
      int length = ((size * 3) / 2) + 1;
      float[] newArray = new float[length];

      // Copy the first part directly
      System.arraycopy(array, 0, newArray, 0, index);

      // Copy the rest shifted over by one to make room
      System.arraycopy(array, index, newArray, index + 1, size - index);
      array = newArray;
    }

    array[index] = element;
    size++;
    modCount++;
  }

  @Override
  public boolean addAll(Collection<? extends Float> collection) {
    ensureIsMutable();

    if (collection == null) {
      throw new NullPointerException();
    }

    // We specialize when adding another FloatArrayList to avoid boxing elements.
    if (!(collection instanceof FloatArrayList)) {
      return super.addAll(collection);
    }

    FloatArrayList list = (FloatArrayList) collection;
    if (list.size == 0) {
      return false;
    }

    int overflow = Integer.MAX_VALUE - size;
    if (overflow < list.size) {
      // We can't actually represent a list this large.
      throw new OutOfMemoryError();
    }

    int newSize = size + list.size;
    if (newSize > array.length) {
      array = Arrays.copyOf(array, newSize);
    }

    System.arraycopy(list.array, 0, array, size, list.size);
    size = newSize;
    modCount++;
    return true;
  }

  @Override
  public boolean remove(Object o) {
    ensureIsMutable();
    for (int i = 0; i < size; i++) {
      if (o.equals(array[i])) {
        System.arraycopy(array, i + 1, array, i, size - i);
        size--;
        modCount++;
        return true;
      }
    }
    return false;
  }

  @Override
  public Float remove(int index) {
    ensureIsMutable();
    ensureIndexInRange(index);
    float value = array[index];
    System.arraycopy(array, index + 1, array, index, size - index);
    size--;
    modCount++;
    return value;
  }

  /**
   * Ensures that the provided {@code index} is within the range of {@code [0, size]}. Throws an
   * {@link IndexOutOfBoundsException} if it is not.
   *
   * @param index the index to verify is in range
   */
  private void ensureIndexInRange(int index) {
    if (index < 0 || index >= size) {
      throw new IndexOutOfBoundsException(makeOutOfBoundsExceptionMessage(index));
    }
  }

  private String makeOutOfBoundsExceptionMessage(int index) {
    return "Index:" + index + ", Size:" + size;
  }
}
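The addFloat() resize above grows the backing array to ((size * 3) / 2) + 1 only when it is full, so a long run of appends costs amortized O(1) with O(log n) allocations. A throwaway sketch of the resulting capacity schedule, assuming DEFAULT_CAPACITY (inherited from AbstractProtobufList) is 10:

public class GrowthSketch {
  public static void main(String[] args) {
    int capacity = 10;  // assumed DEFAULT_CAPACITY
    for (int i = 0; i < 6; i++) {
      System.out.print(capacity + " ");
      capacity = ((capacity * 3) / 2) + 1;  // the growth rule from addFloat()
    }
    // Prints: 10 16 25 38 58 88 -- roughly 1.5x per resize.
  }
}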
@@ -1,454 +0,0 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/wrappers.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

/**
 * <pre>
 * Wrapper message for `float`.
 * The JSON representation for `FloatValue` is JSON number.
 * </pre>
 *
 * Protobuf type {@code google.protobuf.FloatValue}
 */
public final class FloatValue extends
    org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.protobuf.FloatValue)
    FloatValueOrBuilder {
  // Use FloatValue.newBuilder() to construct.
  private FloatValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private FloatValue() {
    value_ = 0F;
  }

  @java.lang.Override
  public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  private FloatValue(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 13: {

            value_ = input.readFloat();
            break;
          }
        }
      }
    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_FloatValue_descriptor;
  }

  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_FloatValue_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue.Builder.class);
  }

  public static final int VALUE_FIELD_NUMBER = 1;
  private float value_;
  /**
   * <pre>
   * The float value.
   * </pre>
   *
   * <code>float value = 1;</code>
   */
  public float getValue() {
    return value_;
  }

  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    if (value_ != 0F) {
      output.writeFloat(1, value_);
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (value_ != 0F) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeFloatSize(1, value_);
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue) obj;

    boolean result = true;
    result = result && (
        java.lang.Float.floatToIntBits(getValue())
        == java.lang.Float.floatToIntBits(
            other.getValue()));
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + VALUE_FIELD_NUMBER;
    hash = (53 * hash) + java.lang.Float.floatToIntBits(
        getValue());
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parseFrom(byte[] data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parseFrom(
      byte[] data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Wrapper message for `float`.
   * The JSON representation for `FloatValue` is JSON number.
   * </pre>
   *
   * Protobuf type {@code google.protobuf.FloatValue}
   */
  public static final class Builder extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.protobuf.FloatValue)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValueOrBuilder {
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_FloatValue_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_FloatValue_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      value_ = 0F;

      return this;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_FloatValue_descriptor;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue.getDefaultInstance();
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue build() {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue buildPartial() {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue(this);
      result.value_ = value_;
      onBuilt();
      return result;
    }

    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue) {
        return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue other) {
      if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue.getDefaultInstance()) return this;
      if (other.getValue() != 0F) {
        setValue(other.getValue());
      }
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private float value_ ;
    /**
     * <pre>
     * The float value.
     * </pre>
     *
     * <code>float value = 1;</code>
     */
    public float getValue() {
      return value_;
    }
    /**
     * <pre>
     * The float value.
     * </pre>
     *
     * <code>float value = 1;</code>
     */
    public Builder setValue(float value) {

      value_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The float value.
     * </pre>
     *
     * <code>float value = 1;</code>
     */
    public Builder clearValue() {

      value_ = 0F;
      onChanged();
      return this;
    }
    public final Builder setUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    public final Builder mergeUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }


    // @@protoc_insertion_point(builder_scope:google.protobuf.FloatValue)
  }

  // @@protoc_insertion_point(class_scope:google.protobuf.FloatValue)
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue();
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FloatValue>
      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<FloatValue>() {
    public FloatValue parsePartialFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return new FloatValue(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FloatValue> parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<FloatValue> getParserForType() {
    return PARSER;
  }

  public org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
@@ -1,18 +0,0 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/wrappers.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface FloatValueOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.FloatValue)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * The float value.
   * </pre>
   *
   * <code>float value = 1;</code>
   */
  float getValue();
}
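The generated wrapper API round-trips like any other protobuf message; a minimal usage sketch, written against the stock com.google.protobuf.FloatValue rather than the shaded copy deleted here:

import com.google.protobuf.FloatValue;
import com.google.protobuf.InvalidProtocolBufferException;

public class WrapperRoundTrip {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // Build, serialize, and re-parse via the generated API shown above.
    FloatValue original = FloatValue.newBuilder().setValue(1.5f).build();
    byte[] wire = original.toByteArray();   // tag byte 0x0D, then 4 little-endian float bytes
    FloatValue parsed = FloatValue.parseFrom(wire);
    System.out.println(parsed.getValue());  // 1.5
  }
}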
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,451 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: google/protobuf/wrappers.proto
|
||||
|
||||
package org.apache.hadoop.hbase.shaded.com.google.protobuf;
|
||||
|
||||
/**
|
||||
* <pre>
|
||||
* Wrapper message for `int32`.
|
||||
* The JSON representation for `Int32Value` is JSON number.
|
||||
* </pre>
|
||||
*
|
||||
* Protobuf type {@code google.protobuf.Int32Value}
|
||||
*/
|
||||
public final class Int32Value extends
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
|
||||
// @@protoc_insertion_point(message_implements:google.protobuf.Int32Value)
|
||||
Int32ValueOrBuilder {
|
||||
// Use Int32Value.newBuilder() to construct.
|
||||
private Int32Value(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
|
||||
super(builder);
|
||||
}
|
||||
private Int32Value() {
|
||||
value_ = 0;
|
||||
}
|
||||
|
||||
@java.lang.Override
|
||||
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
|
||||
getUnknownFields() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
|
||||
}
|
||||
private Int32Value(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
|
||||
this();
|
||||
int mutable_bitField0_ = 0;
|
||||
try {
|
||||
boolean done = false;
|
||||
while (!done) {
|
||||
int tag = input.readTag();
|
||||
switch (tag) {
|
||||
case 0:
|
||||
done = true;
|
||||
break;
|
||||
default: {
|
||||
if (!input.skipField(tag)) {
|
||||
done = true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 8: {
|
||||
|
||||
value_ = input.readInt32();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
|
||||
throw e.setUnfinishedMessage(this);
|
||||
} catch (java.io.IOException e) {
|
||||
throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
|
||||
e).setUnfinishedMessage(this);
|
||||
} finally {
|
||||
makeExtensionsImmutable();
|
||||
}
|
||||
}
|
||||
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int32Value_descriptor;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int32Value_fieldAccessorTable
|
||||
.ensureFieldAccessorsInitialized(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.Builder.class);
|
||||
}
|
||||
|
||||
public static final int VALUE_FIELD_NUMBER = 1;
|
||||
private int value_;
|
||||
/**
|
||||
* <pre>
|
||||
* The int32 value.
|
||||
* </pre>
|
||||
*
|
||||
* <code>int32 value = 1;</code>
|
||||
*/
|
||||
public int getValue() {
|
||||
return value_;
|
||||
}
|
||||
|
||||
private byte memoizedIsInitialized = -1;
|
||||
public final boolean isInitialized() {
|
||||
byte isInitialized = memoizedIsInitialized;
|
||||
if (isInitialized == 1) return true;
|
||||
if (isInitialized == 0) return false;
|
||||
|
||||
memoizedIsInitialized = 1;
|
||||
return true;
|
||||
}
|
||||
|
||||
public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
|
||||
throws java.io.IOException {
|
||||
if (value_ != 0) {
|
||||
output.writeInt32(1, value_);
|
||||
}
|
||||
}
|
||||
|
||||
public int getSerializedSize() {
|
||||
int size = memoizedSize;
|
||||
if (size != -1) return size;
|
||||
|
||||
size = 0;
|
||||
if (value_ != 0) {
|
||||
size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
|
||||
.computeInt32Size(1, value_);
|
||||
}
|
||||
memoizedSize = size;
|
||||
return size;
|
||||
}
|
||||
|
||||
private static final long serialVersionUID = 0L;
|
||||
@java.lang.Override
|
||||
public boolean equals(final java.lang.Object obj) {
|
||||
if (obj == this) {
|
||||
return true;
|
||||
}
|
||||
if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value)) {
|
||||
return super.equals(obj);
|
||||
}
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value) obj;
|
||||
|
||||
boolean result = true;
|
||||
result = result && (getValue()
|
||||
== other.getValue());
|
||||
return result;
|
||||
}
|
||||
|
||||
@java.lang.Override
|
||||
public int hashCode() {
|
||||
if (memoizedHashCode != 0) {
|
||||
return memoizedHashCode;
|
||||
}
|
||||
int hash = 41;
|
||||
hash = (19 * hash) + getDescriptor().hashCode();
|
||||
hash = (37 * hash) + VALUE_FIELD_NUMBER;
|
||||
hash = (53 * hash) + getValue();
|
||||
hash = (29 * hash) + unknownFields.hashCode();
|
||||
memoizedHashCode = hash;
|
||||
return hash;
|
||||
}
|
||||
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
|
||||
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data, extensionRegistry);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom(byte[] data)
|
||||
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom(
|
||||
byte[] data,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data, extensionRegistry);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.parseWithIOException(PARSER, input);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom(
|
||||
java.io.InputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.parseWithIOException(PARSER, input, extensionRegistry);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.parseDelimitedWithIOException(PARSER, input);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.parseWithIOException(PARSER, input);
|
||||
}
|
||||
public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.parseWithIOException(PARSER, input, extensionRegistry);
|
||||
}
|
||||
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder() {
|
||||
return DEFAULT_INSTANCE.toBuilder();
|
||||
}
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value prototype) {
|
||||
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() {
|
||||
return this == DEFAULT_INSTANCE
|
||||
? new Builder() : new Builder().mergeFrom(this);
|
||||
}
|
||||
|
||||
@java.lang.Override
|
||||
protected Builder newBuilderForType(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
|
||||
Builder builder = new Builder(parent);
|
||||
return builder;
|
||||
}
|
||||
/**
|
||||
* <pre>
|
||||
* Wrapper message for `int32`.
|
||||
* The JSON representation for `Int32Value` is JSON number.
|
||||
* </pre>
|
||||
*
|
||||
* Protobuf type {@code google.protobuf.Int32Value}
|
||||
*/
|
||||
public static final class Builder extends
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
|
||||
// @@protoc_insertion_point(builder_implements:google.protobuf.Int32Value)
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32ValueOrBuilder {
|
||||
public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int32Value_descriptor;
|
||||
}
|
||||
|
||||
protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int32Value_fieldAccessorTable
|
||||
.ensureFieldAccessorsInitialized(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.Builder.class);
|
||||
}
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.newBuilder()
|
||||
private Builder() {
|
||||
maybeForceBuilderInitialization();
|
||||
}
|
||||
|
||||
private Builder(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
|
||||
super(parent);
|
||||
maybeForceBuilderInitialization();
|
||||
}
|
||||
private void maybeForceBuilderInitialization() {
|
||||
if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
|
||||
.alwaysUseFieldBuilders) {
|
||||
}
|
||||
}
|
||||
public Builder clear() {
|
||||
super.clear();
|
||||
value_ = 0;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_Int32Value_descriptor;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.getDefaultInstance();
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value build() {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value result = buildPartial();
|
||||
if (!result.isInitialized()) {
|
||||
throw newUninitializedMessageException(result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value buildPartial() {
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value(this);
|
||||
result.value_ = value_;
|
||||
onBuilt();
|
||||
return result;
|
||||
}
|
||||
|
||||
public Builder clone() {
|
||||
return (Builder) super.clone();
|
||||
}
|
||||
public Builder setField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
|
||||
Object value) {
|
||||
return (Builder) super.setField(field, value);
|
||||
}
|
||||
public Builder clearField(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
|
||||
return (Builder) super.clearField(field);
|
||||
}
|
||||
public Builder clearOneof(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
|
||||
return (Builder) super.clearOneof(oneof);
|
||||
}
|
||||
    public Builder setRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value) {
        return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value other) {
      if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value.getDefaultInstance()) return this;
      if (other.getValue() != 0) {
        setValue(other.getValue());
      }
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private int value_ ;
    /**
     * <pre>
     * The int32 value.
     * </pre>
     *
     * <code>int32 value = 1;</code>
     */
    public int getValue() {
      return value_;
    }
    /**
     * <pre>
     * The int32 value.
     * </pre>
     *
     * <code>int32 value = 1;</code>
     */
    public Builder setValue(int value) {

      value_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The int32 value.
     * </pre>
     *
     * <code>int32 value = 1;</code>
     */
    public Builder clearValue() {

      value_ = 0;
      onChanged();
      return this;
    }
    public final Builder setUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    public final Builder mergeUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }


    // @@protoc_insertion_point(builder_scope:google.protobuf.Int32Value)
  }

  // @@protoc_insertion_point(class_scope:google.protobuf.Int32Value)
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value();
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Int32Value>
      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<Int32Value>() {
    public Int32Value parsePartialFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
        return new Int32Value(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Int32Value> parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Int32Value> getParserForType() {
    return PARSER;
  }

  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
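For context on the API being deleted here, a minimal round-trip sketch built
from the methods visible in this diff (newBuilder/setValue/build/getValue and
the PARSER singleton) plus toByteArray() and parseFrom(byte[]), which the
generated base classes supply; the class name Int32ValueRoundTrip is
illustrative:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value;

    public class Int32ValueRoundTrip {
      public static void main(String[] args) throws Exception {
        // Build a wrapper around a primitive int via the generated Builder.
        Int32Value original = Int32Value.newBuilder().setValue(42).build();

        // Serialize and re-parse; parseFrom(byte[]) routes through the
        // PARSER singleton shown above.
        byte[] wire = original.toByteArray();
        Int32Value parsed = Int32Value.parseFrom(wire);

        // value is a proto3 scalar: 0 is the default, which is why
        // mergeFrom() above only copies it when getValue() != 0.
        System.out.println(parsed.getValue());                          // 42
        System.out.println(Int32Value.getDefaultInstance().getValue()); // 0
      }
    }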
@@ -1,18 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/wrappers.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface Int32ValueOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.Int32Value)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * The int32 value.
   * </pre>
   *
   * <code>int32 value = 1;</code>
   */
  int getValue();
}
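The small OrBuilder interface above exists so read-only code can accept either
the finished message or an in-progress Builder: both implement it (the
Builder's implements clause is visible in the Int64Value hunk below). A short
sketch; Int32ValueReader and readValue are illustrative names:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32ValueOrBuilder;

    class Int32ValueReader {
      // Accepts an Int32Value or an Int32Value.Builder alike.
      static int readValue(Int32ValueOrBuilder v) {
        return v.getValue();
      }

      static void demo() {
        Int32Value.Builder builder = Int32Value.newBuilder().setValue(7);
        int fromBuilder = readValue(builder);          // 7, no build() needed
        int fromMessage = readValue(builder.build());  // 7
      }
    }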
@@ -1,452 +0,0 @@
[Deleted file: org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64Value.java --
the generated wrapper message for `int64` from google/protobuf/wrappers.proto.
It mirrors the Int32Value class above line for line, but with a long value_
field (default 0L), readInt64/writeInt64/computeInt64Size in the parse and
serialize paths, and a hashCode() that folds the long through
Internal.hashLong(getValue()).]
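One behavioral detail worth surfacing from the placeholder: Int64Value's
hashCode() cannot hash a long directly, so it folds the value through
Internal.hashLong, defined in the Internal.java hunk below with the same fold
Long.hashCode() uses:

    /** Helper method for implementing {@link Message#hashCode()} for longs. */
    public static int hashLong(long n) {
      return (int) (n ^ (n >>> 32));
    }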
@@ -1,18 +0,0 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/protobuf/wrappers.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface Int64ValueOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.Int64Value)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * The int64 value.
   * </pre>
   *
   * <code>int64 value = 1;</code>
   */
  long getValue();
}
@@ -1,272 +0,0 @@
[Deleted file: org/apache/hadoop/hbase/shaded/com/google/protobuf/IntArrayList.java --
an implementation of Internal.IntList backed by a primitive int[] so repeated
int32 fields avoid boxing (standard protobuf BSD license header elided). Key
details: a shared immutable EMPTY_LIST; getInt/setInt/remove range-check via
ensureIndexInRange(int); addInt(int, int) shifts elements with
System.arraycopy and grows the backing array to ((size * 3) / 2) + 1 when
full; addAll(Collection) specializes for another IntArrayList, throwing
OutOfMemoryError if the combined size would exceed Integer.MAX_VALUE.]
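A self-contained distillation of the deleted addInt(int, int) insertion path,
since the placeholder above only names it. GrowableIntArray and insert() are
illustrative names, and the starting capacity of 10 is an assumption
(AbstractProtobufList's DEFAULT_CAPACITY is not shown in this diff):

    final class GrowableIntArray {
      private int[] array = new int[10]; // assumed DEFAULT_CAPACITY
      private int size;

      // Mirrors IntArrayList.addInt(int, int): shift in place when there is
      // room, otherwise allocate at 1.5x + 1 and copy around the gap.
      void insert(int index, int element) {
        if (index < 0 || index > size) {
          throw new IndexOutOfBoundsException("Index:" + index + ", Size:" + size);
        }
        if (size < array.length) {
          System.arraycopy(array, index, array, index + 1, size - index);
        } else {
          int[] newArray = new int[((size * 3) / 2) + 1];
          System.arraycopy(array, 0, newArray, 0, index);
          System.arraycopy(array, index, newArray, index + 1, size - index);
          array = newArray;
        }
        array[index] = element;
        size++;
      }
    }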
@@ -1,751 +0,0 @@
[Deleted file: org/apache/hadoop/hbase/shaded/com/google/protobuf/Internal.java --
the grab-bag of helpers that protobuf generated code calls into (standard BSD
license header elided). It defined: the UTF_8 and ISO_8859_1 charsets plus
checkNotNull; stringDefaultValue/bytesDefaultValue/byteArrayDefaultValue/
byteBufferDefaultValue, which decode default values that protoc embeds in
generated code as ISO-8859-1 string literals; copyByteBuffer; isValidUtf8 for
ByteString and byte[]; toByteArray/toStringUtf8; the EnumLite and EnumLiteMap
interfaces; hashLong ((int) (n ^ (n >>> 32))), hashBoolean, hashEnum and
hashEnumList; equals/hashCode helpers for List<byte[]> and ByteBuffer fields
built on partialHash; a reflective getDefaultInstance(Class); the
EMPTY_BYTE_ARRAY, EMPTY_BYTE_BUFFER and EMPTY_CODED_INPUT_STREAM constants;
the ListAdapter and MapAdapter view classes; and the ProtobufList family
(IntList, BooleanList, LongList, DoubleList, FloatList) of unboxed,
immutability-aware lists.]
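The subtlest helper in the deleted Internal.java is stringDefaultValue: its
javadoc explains that protoc embeds a string field's UTF-8 bytes as an
ISO-8859-1 string literal (one char per byte) and leaves decoding to the
runtime, and the entire implementation is
new String(bytes.getBytes(ISO_8859_1), UTF_8). A minimal, runnable round trip
showing why that mapping is lossless; StringDefaultValueDemo is an
illustrative name:

    import java.nio.charset.StandardCharsets;

    public class StringDefaultValueDemo {
      public static void main(String[] args) {
        String original = "caf\u00e9"; // "café"; non-ASCII on purpose

        // Codegen time: spell the UTF-8 bytes out as an ISO-8859-1 literal,
        // one char per byte value 0..255.
        byte[] utf8 = original.getBytes(StandardCharsets.UTF_8);
        String literal = new String(utf8, StandardCharsets.ISO_8859_1);

        // Runtime (Internal.stringDefaultValue): reverse the mapping.
        String decoded = new String(
            literal.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8);

        System.out.println(decoded.equals(original)); // true
      }
    }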
@@ -1,146 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
// [Standard protobuf BSD license terms elided; identical to the headers of
// the deleted files above.]

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import java.io.IOException;

/**
 * Thrown when a protocol message being parsed is invalid in some way,
 * e.g. it contains a malformed varint or a negative byte length.
 *
 * @author kenton@google.com Kenton Varda
 */
public class InvalidProtocolBufferException extends IOException {
  private static final long serialVersionUID = -1616151763072450476L;
  private MessageLite unfinishedMessage = null;

  public InvalidProtocolBufferException(final String description) {
    super(description);
  }

  public InvalidProtocolBufferException(IOException e) {
    super(e.getMessage(), e);
  }

  /**
   * Attaches an unfinished message to the exception to support best-effort
   * parsing in {@code Parser} interface.
   *
   * @return this
   */
  public InvalidProtocolBufferException setUnfinishedMessage(
      MessageLite unfinishedMessage) {
    this.unfinishedMessage = unfinishedMessage;
    return this;
  }

  /**
   * Returns the unfinished message attached to the exception, or null if
   * no message is attached.
   */
  public MessageLite getUnfinishedMessage() {
    return unfinishedMessage;
  }

  /**
   * Unwraps the underlying {@link IOException} if this exception was caused by an I/O
   * problem. Otherwise, returns {@code this}.
   */
  public IOException unwrapIOException() {
    return getCause() instanceof IOException ? (IOException) getCause() : this;
  }

  static InvalidProtocolBufferException truncatedMessage() {
    return new InvalidProtocolBufferException(
        "While parsing a protocol message, the input ended unexpectedly " +
        "in the middle of a field. This could mean either that the " +
        "input has been truncated or that an embedded message " +
        "misreported its own length.");
  }

  static InvalidProtocolBufferException negativeSize() {
    return new InvalidProtocolBufferException(
        "CodedInputStream encountered an embedded string or message " +
        "which claimed to have negative size.");
  }

  static InvalidProtocolBufferException malformedVarint() {
|
||||
return new InvalidProtocolBufferException(
|
||||
"CodedInputStream encountered a malformed varint.");
|
||||
}
|
||||
|
||||
static InvalidProtocolBufferException invalidTag() {
|
||||
return new InvalidProtocolBufferException(
|
||||
"Protocol message contained an invalid tag (zero).");
|
||||
}
|
||||
|
||||
static InvalidProtocolBufferException invalidEndTag() {
|
||||
return new InvalidProtocolBufferException(
|
||||
"Protocol message end-group tag did not match expected tag.");
|
||||
}
|
||||
|
||||
static InvalidWireTypeException invalidWireType() {
|
||||
return new InvalidWireTypeException(
|
||||
"Protocol message tag had invalid wire type.");
|
||||
}
|
||||
|
||||
/**
|
||||
* Exception indicating that an unexpected wire type was encountered for a field.
|
||||
*/
|
||||
@ExperimentalApi
|
||||
public static class InvalidWireTypeException extends InvalidProtocolBufferException {
|
||||
private static final long serialVersionUID = 3283890091615336259L;
|
||||
|
||||
public InvalidWireTypeException(String description) {
|
||||
super(description);
|
||||
}
|
||||
}
|
||||
|
||||
static InvalidProtocolBufferException recursionLimitExceeded() {
|
||||
return new InvalidProtocolBufferException(
|
||||
"Protocol message had too many levels of nesting. May be malicious. " +
|
||||
"Use CodedInputStream.setRecursionLimit() to increase the depth limit.");
|
||||
}
|
||||
|
||||
static InvalidProtocolBufferException sizeLimitExceeded() {
|
||||
return new InvalidProtocolBufferException(
|
||||
"Protocol message was too large. May be malicious. " +
|
||||
"Use CodedInputStream.setSizeLimit() to increase the size limit.");
|
||||
}
|
||||
|
||||
static InvalidProtocolBufferException parseFailure() {
|
||||
return new InvalidProtocolBufferException("Failed to parse the message.");
|
||||
}
|
||||
|
||||
static InvalidProtocolBufferException invalidUtf8() {
|
||||
return new InvalidProtocolBufferException("Protocol message had invalid UTF-8.");
|
||||
}
|
||||
}
|
|
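A short sketch of how a caller might use the accessors above when parsing fails; SomeMessage is a hypothetical generated message type:

// Sketch: best-effort parse of hypothetical SomeMessage bytes.
static MessageLite parseLeniently(byte[] bytes) throws IOException {
  try {
    return SomeMessage.parseFrom(bytes);
  } catch (InvalidProtocolBufferException e) {
    MessageLite partial = e.getUnfinishedMessage();  // may be null
    if (partial != null) {
      return partial;  // keep whatever parsed before the failure
    }
    throw e.unwrapIOException();  // root IOException if one was wrapped, else e itself
  }
}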
@@ -1,154 +0,0 @@
|
|||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package org.apache.hadoop.hbase.shaded.com.google.protobuf;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
/**
|
||||
* LazyField encapsulates the logic of lazily parsing message fields. It stores
|
||||
* the message in a ByteString initially and then parses it on demand.
|
||||
*
|
||||
* Most of the key methods are implemented in {@link LazyFieldLite}, but this class
|
||||
* can contain default instance of the message to provide {@code hashCode()},
|
||||
* {@code equals()} and {@code toString()}.
|
||||
*
|
||||
* @author xiangl@google.com (Xiang Li)
|
||||
*/
|
||||
public class LazyField extends LazyFieldLite {
|
||||
|
||||
/**
|
||||
* Carries a message's default instance, which is used by {@code hashCode()}, {@code equals()} and
|
||||
* {@code toString()}.
|
||||
*/
|
||||
private final MessageLite defaultInstance;
|
||||
|
||||
public LazyField(MessageLite defaultInstance,
|
||||
ExtensionRegistryLite extensionRegistry, ByteString bytes) {
|
||||
super(extensionRegistry, bytes);
|
||||
|
||||
this.defaultInstance = defaultInstance;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsDefaultInstance() {
|
||||
return super.containsDefaultInstance() || value == defaultInstance;
|
||||
}
|
||||
|
||||
public MessageLite getValue() {
|
||||
return getValue(defaultInstance);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return getValue().hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
return getValue().equals(obj);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return getValue().toString();
|
||||
}
|
||||
|
||||
// ====================================================
|
||||
|
||||
/**
|
||||
* LazyEntry and LazyIterator are used to encapsulate the LazyField when
|
||||
* users iterate over all fields of a FieldSet.
|
||||
*/
|
||||
static class LazyEntry<K> implements Entry<K, Object> {
|
||||
private Entry<K, LazyField> entry;
|
||||
|
||||
private LazyEntry(Entry<K, LazyField> entry) {
|
||||
this.entry = entry;
|
||||
}
|
||||
|
||||
@Override
|
||||
public K getKey() {
|
||||
return entry.getKey();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getValue() {
|
||||
LazyField field = entry.getValue();
|
||||
if (field == null) {
|
||||
return null;
|
||||
}
|
||||
return field.getValue();
|
||||
}
|
||||
|
||||
public LazyField getField() {
|
||||
return entry.getValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object setValue(Object value) {
|
||||
if (!(value instanceof MessageLite)) {
|
||||
throw new IllegalArgumentException(
|
||||
"LazyField now only used for MessageSet, "
|
||||
+ "and the value of MessageSet must be an instance of MessageLite");
|
||||
}
|
||||
return entry.getValue().setValue((MessageLite) value);
|
||||
}
|
||||
}
|
||||
|
||||
static class LazyIterator<K> implements Iterator<Entry<K, Object>> {
|
||||
private Iterator<Entry<K, Object>> iterator;
|
||||
|
||||
public LazyIterator(Iterator<Entry<K, Object>> iterator) {
|
||||
this.iterator = iterator;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return iterator.hasNext();
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public Entry<K, Object> next() {
|
||||
Entry<K, ?> entry = iterator.next();
|
||||
if (entry.getValue() instanceof LazyField) {
|
||||
return new LazyEntry<K>((Entry<K, LazyField>) entry);
|
||||
}
|
||||
return (Entry<K, Object>) entry;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
}
|
|
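As a sketch of the lifecycle this class manages (the message type and bytes below are hypothetical stand-ins): the field holds only a ByteString until the first accessor forces a parse, after which equals/hashCode/toString delegate to the parsed value.

// Sketch: construction stores bytes; getValue() triggers the one-time parse.
LazyField lazy = new LazyField(
    SomeMessage.getDefaultInstance(),
    ExtensionRegistryLite.getEmptyRegistry(),
    serializedBytes);
MessageLite parsed = lazy.getValue();  // parses serializedBytes here
boolean same = lazy.equals(parsed);    // delegates to the parsed value's equals()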
@@ -1,437 +0,0 @@
|
|||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package org.apache.hadoop.hbase.shaded.com.google.protobuf;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* LazyFieldLite encapsulates the logic of lazily parsing message fields. It stores
|
||||
* the message in a ByteString initially and then parses it on-demand.
|
||||
*
|
||||
* LazyFieldLite is thread-compatible: concurrent reads are safe once the proto that this
|
||||
* LazyFieldLite is a part of is no longer being mutated by its Builder. However, explicit
|
||||
* synchronization is needed under read/write situations.
|
||||
*
|
||||
* When a LazyFieldLite is used in the context of a MessageLite object, its behavior is considered
|
||||
* to be immutable and none of the setter methods in its API are expected to be invoked. All of the
|
||||
* getters are expected to be thread-safe. When used in the context of a MessageLite.Builder,
|
||||
* setters can be invoked, but there is no guarantee of thread safety.
|
||||
*
|
||||
* TODO(yatin,dweis): Consider splitting this class's functionality and put the mutable methods
|
||||
* into a separate builder class to allow us to give stronger compile-time guarantees.
|
||||
*
|
||||
* This class is an internal implementation detail of the protobuf library, so you don't need to use it
|
||||
* directly.
|
||||
*
|
||||
* @author xiangl@google.com (Xiang Li)
|
||||
*/
|
||||
public class LazyFieldLite {
|
||||
private static final ExtensionRegistryLite EMPTY_REGISTRY =
|
||||
ExtensionRegistryLite.getEmptyRegistry();
|
||||
|
||||
/**
|
||||
* The value associated with the LazyFieldLite object is stored in one or more of the following
|
||||
* three fields (delayedBytes, value, memoizedBytes). They should together be interpreted as
|
||||
* follows.
|
||||
* 1) delayedBytes can be non-null, while value and memoizedBytes are null. The object will be in
|
||||
* this state while the value for the object has not yet been parsed.
|
||||
* 2) Both delayedBytes and value are non-null. The object transitions to this state as soon as
|
||||
* some caller needs to access the value (by invoking getValue()).
|
||||
* 3) memoizedBytes is merely an optimization for calls to LazyFieldLite.toByteString() to avoid
|
||||
* recomputing the ByteString representation on each call. Instead, when the value is parsed
|
||||
* from delayedBytes, we will also assign the contents of delayedBytes to memoizedBytes (since
|
||||
* that is the ByteString representation of value).
|
||||
* 4) Finally, if the LazyFieldLite was created directly with a parsed MessageLite value, then
|
||||
* delayedBytes will be null, and memoizedBytes will be initialized only upon the first call to
|
||||
* LazyFieldLite.toByteString().
|
||||
*
|
||||
* Given the above conditions, any caller that needs a serialized representation of this object
|
||||
* must first check if the memoizedBytes or delayedBytes ByteString is non-null and use it
|
||||
* directly; if both of those are null, it can look at the parsed value field. Similarly, any
|
||||
* caller that needs a parsed value must first check if the value field is already non-null, if
|
||||
* not it must parse the value from delayedBytes.
|
||||
*/
|
||||
|
||||
/**
|
||||
* A delayed-parsed version of the contents of this field. When this field is non-null, then the
|
||||
* "value" field is allowed to be null until the time that the value needs to be read.
|
||||
*
|
||||
* When delayedBytes is non-null then {@code extensionRegistry} is required to also be non-null.
|
||||
* {@code value} and {@code memoizedBytes} will be initialized lazily.
|
||||
*/
|
||||
private ByteString delayedBytes;
|
||||
|
||||
/**
|
||||
* An {@code ExtensionRegistryLite} for parsing bytes. It is non-null on a best-effort basis. It
|
||||
* is only guaranteed to be non-null if this message was initialized using bytes and an
|
||||
* {@code ExtensionRegistry}. If it directly had a value set then it will be null, unless it has
|
||||
* been merged with another {@code LazyFieldLite} that had an {@code ExtensionRegistry}.
|
||||
*/
|
||||
private ExtensionRegistryLite extensionRegistry;
|
||||
|
||||
/**
|
||||
* The parsed value. When this is null and a caller needs access to the MessageLite value, then
|
||||
* {@code delayedBytes} will be parsed lazily at that time.
|
||||
*/
|
||||
protected volatile MessageLite value;
|
||||
|
||||
/**
|
||||
* The memoized bytes for {@code value}. This is an optimization for the toByteString() method to
|
||||
* not have to recompute its return-value on each invocation.
|
||||
* TODO(yatin): Figure out whether this optimization is actually necessary.
|
||||
*/
|
||||
private volatile ByteString memoizedBytes;
|
||||
|
||||
/**
|
||||
* Constructs a LazyFieldLite with bytes that will be parsed lazily.
|
||||
*/
|
||||
public LazyFieldLite(ExtensionRegistryLite extensionRegistry, ByteString bytes) {
|
||||
checkArguments(extensionRegistry, bytes);
|
||||
this.extensionRegistry = extensionRegistry;
|
||||
this.delayedBytes = bytes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a LazyFieldLite with no contents, and no ability to parse extensions.
|
||||
*/
|
||||
public LazyFieldLite() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a LazyFieldLite instance with a value. The LazyFieldLite may not be able to parse
|
||||
* the extensions in the value as it has no ExtensionRegistry.
|
||||
*/
|
||||
public static LazyFieldLite fromValue(MessageLite value) {
|
||||
LazyFieldLite lf = new LazyFieldLite();
|
||||
lf.setValue(value);
|
||||
return lf;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!(o instanceof LazyFieldLite)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
LazyFieldLite other = (LazyFieldLite) o;
|
||||
|
||||
// Lazy fields do not work well with equals... If both are delayedBytes, we do not have a
|
||||
// mechanism to deserialize them so we rely on bytes equality. Otherwise we coerce into an
|
||||
// actual message (if necessary) and call equals on the message itself. This implies that two
|
||||
// messages can be unequal but then be turned equal simply by invoking a getter on a lazy field.
|
||||
MessageLite value1 = value;
|
||||
MessageLite value2 = other.value;
|
||||
if (value1 == null && value2 == null) {
|
||||
return toByteString().equals(other.toByteString());
|
||||
} else if (value1 != null && value2 != null) {
|
||||
return value1.equals(value2);
|
||||
} else if (value1 != null) {
|
||||
return value1.equals(other.getValue(value1.getDefaultInstanceForType()));
|
||||
} else {
|
||||
return getValue(value2.getDefaultInstanceForType()).equals(value2);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
// We can't provide a memoizable hash code for lazy fields. The byte strings may have different
|
||||
// hash codes but evaluate to equivalent messages. And we have no facility for constructing
|
||||
// a message here if we were not already holding a value.
|
||||
return 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines whether this LazyFieldLite instance represents the default instance of this type.
|
||||
*/
|
||||
public boolean containsDefaultInstance() {
|
||||
return memoizedBytes == ByteString.EMPTY
|
||||
|| value == null && (delayedBytes == null || delayedBytes == ByteString.EMPTY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears the value state of this instance.
|
||||
*
|
||||
* <p>LazyField is not thread-safe for write access. Synchronization is needed
|
||||
* under read/write situations.
|
||||
*/
|
||||
public void clear() {
|
||||
// Don't clear the ExtensionRegistry. It might prove useful later on when merging in another
|
||||
// value, but there is no guarantee that it will contain all extensions that were directly set
|
||||
// on the values that need to be merged.
|
||||
delayedBytes = null;
|
||||
value = null;
|
||||
memoizedBytes = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Overrides the contents of this LazyField.
|
||||
*
|
||||
* <p>LazyField is not thread-safe for write access. Synchronization is needed
|
||||
* under read/write situations.
|
||||
*/
|
||||
public void set(LazyFieldLite other) {
|
||||
this.delayedBytes = other.delayedBytes;
|
||||
this.value = other.value;
|
||||
this.memoizedBytes = other.memoizedBytes;
|
||||
// If the other LazyFieldLite was created by directly setting the value rather than first by
|
||||
// parsing, then it will not have an extensionRegistry. In this case we hold on to the existing
|
||||
// extensionRegistry, which has no guarantees that it has all the extensions that will be
|
||||
// directly set on the value.
|
||||
if (other.extensionRegistry != null) {
|
||||
this.extensionRegistry = other.extensionRegistry;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns message instance. It may do some thread-safe delayed parsing of bytes.
|
||||
*
|
||||
* @param defaultInstance the message's default instance; it is also used to get the parser for
|
||||
* the message type.
|
||||
*/
|
||||
public MessageLite getValue(MessageLite defaultInstance) {
|
||||
ensureInitialized(defaultInstance);
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the value of the instance and returns the old value, without parsing any delayed bytes.
|
||||
*
|
||||
* <p>LazyField is not thread-safe for write access. Synchronization is needed
|
||||
* under read/write situations.
|
||||
*/
|
||||
public MessageLite setValue(MessageLite value) {
|
||||
MessageLite originalValue = this.value;
|
||||
this.delayedBytes = null;
|
||||
this.memoizedBytes = null;
|
||||
this.value = value;
|
||||
return originalValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* Merges another instance's contents. In some cases this may drop some extensions if both fields
|
||||
* contain data. If the other field has an {@code ExtensionRegistry} but this does not, then this
|
||||
* field will copy over that {@code ExtensionRegistry}.
|
||||
*
|
||||
* <p>LazyField is not thread-safe for write access. Synchronization is needed
|
||||
* under read/write situations.
|
||||
*/
|
||||
public void merge(LazyFieldLite other) {
|
||||
if (other.containsDefaultInstance()) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.containsDefaultInstance()) {
|
||||
set(other);
|
||||
return;
|
||||
}
|
||||
|
||||
// If the other field has an extension registry but this does not, copy over the other extension
|
||||
// registry.
|
||||
if (this.extensionRegistry == null) {
|
||||
this.extensionRegistry = other.extensionRegistry;
|
||||
}
|
||||
|
||||
// In the case that both of them are not parsed we simply concatenate the bytes to save time. In
|
||||
// the (probably rare) case that they have different extension registries there is a chance that
|
||||
// some of the extensions may be dropped, but the tradeoff of making this operation fast seems
|
||||
// to outweigh the benefits of combining the extension registries, which is not normally done for
|
||||
// lite protos anyway.
|
||||
if (this.delayedBytes != null && other.delayedBytes != null) {
|
||||
this.delayedBytes = this.delayedBytes.concat(other.delayedBytes);
|
||||
return;
|
||||
}
|
||||
|
||||
// At least one is parsed and both contain data. We won't drop any extensions here directly, but
|
||||
// in the case that the extension registries are not the same then we might in the future if we
|
||||
// need to serialize and parse a message again.
|
||||
if (this.value == null && other.value != null) {
|
||||
setValue(mergeValueAndBytes(other.value, this.delayedBytes, this.extensionRegistry));
|
||||
return;
|
||||
} else if (this.value != null && other.value == null) {
|
||||
setValue(mergeValueAndBytes(this.value, other.delayedBytes, other.extensionRegistry));
|
||||
return;
|
||||
}
|
||||
|
||||
// At this point we have two fully parsed messages.
|
||||
setValue(this.value.toBuilder().mergeFrom(other.value).build());
|
||||
}
|
||||
|
||||
/**
|
||||
* Merges another instance's contents from a stream.
|
||||
*
|
||||
* <p>LazyField is not thread-safe for write access. Synchronization is needed
|
||||
* under read/write situations.
|
||||
*/
|
||||
public void mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
|
||||
throws IOException {
|
||||
if (this.containsDefaultInstance()) {
|
||||
setByteString(input.readBytes(), extensionRegistry);
|
||||
return;
|
||||
}
|
||||
|
||||
// If the other field has an extension registry but this does not, copy over the other extension
|
||||
// registry.
|
||||
if (this.extensionRegistry == null) {
|
||||
this.extensionRegistry = extensionRegistry;
|
||||
}
|
||||
|
||||
// In the case that both of them are not parsed we simply concatenate the bytes to save time. In
|
||||
// the (probably rare) case that they have different extension registries there is a chance that
|
||||
// some of the extensions may be dropped, but the tradeoff of making this operation fast seems
|
||||
// to outweigh the benefits of combining the extension registries, which is not normally done for
|
||||
// lite protos anyway.
|
||||
if (this.delayedBytes != null) {
|
||||
setByteString(this.delayedBytes.concat(input.readBytes()), this.extensionRegistry);
|
||||
return;
|
||||
}
|
||||
|
||||
// We are parsed and both contain data. We won't drop any extensions here directly, but in the
|
||||
// case that the extension registries are not the same then we might in the future if we
|
||||
// need to serialize and parse a message again.
|
||||
try {
|
||||
setValue(value.toBuilder().mergeFrom(input, extensionRegistry).build());
|
||||
} catch (InvalidProtocolBufferException e) {
|
||||
// Nothing is logged and no exceptions are thrown. Clients will be unaware that a proto
|
||||
// was invalid.
|
||||
}
|
||||
}
|
||||
|
||||
private static MessageLite mergeValueAndBytes(
|
||||
MessageLite value, ByteString otherBytes, ExtensionRegistryLite extensionRegistry) {
|
||||
try {
|
||||
return value.toBuilder().mergeFrom(otherBytes, extensionRegistry).build();
|
||||
} catch (InvalidProtocolBufferException e) {
|
||||
// Nothing is logged and no exceptions are thrown. Clients will be unaware that a proto
|
||||
// was invalid.
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets this field with bytes to delay-parse.
|
||||
*/
|
||||
public void setByteString(ByteString bytes, ExtensionRegistryLite extensionRegistry) {
|
||||
checkArguments(extensionRegistry, bytes);
|
||||
this.delayedBytes = bytes;
|
||||
this.extensionRegistry = extensionRegistry;
|
||||
this.value = null;
|
||||
this.memoizedBytes = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Because an optional field can be duplicated at the end of the serialized
|
||||
* bytes, the serialized size may change once the LazyField has been
|
||||
* parsed. Be careful when using this method.
|
||||
*/
|
||||
public int getSerializedSize() {
|
||||
// We *must* return delayed bytes size if it was ever set because the dependent messages may
|
||||
// have memoized serialized size based off of it.
|
||||
if (memoizedBytes != null) {
|
||||
return memoizedBytes.size();
|
||||
} else if (delayedBytes != null) {
|
||||
return delayedBytes.size();
|
||||
} else if (value != null) {
|
||||
return value.getSerializedSize();
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a ByteString for this field in a thread-safe way.
|
||||
*/
|
||||
public ByteString toByteString() {
|
||||
if (memoizedBytes != null) {
|
||||
return memoizedBytes;
|
||||
}
|
||||
// We *must* return delayed bytes if it was set because the dependent messages may have
|
||||
// memoized serialized size based off of it.
|
||||
if (delayedBytes != null) {
|
||||
return delayedBytes;
|
||||
}
|
||||
synchronized (this) {
|
||||
if (memoizedBytes != null) {
|
||||
return memoizedBytes;
|
||||
}
|
||||
if (value == null) {
|
||||
memoizedBytes = ByteString.EMPTY;
|
||||
} else {
|
||||
memoizedBytes = value.toByteString();
|
||||
}
|
||||
return memoizedBytes;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Might lazily parse the bytes that were previously passed in. Is thread-safe.
|
||||
*/
|
||||
protected void ensureInitialized(MessageLite defaultInstance) {
|
||||
if (value != null) {
|
||||
return;
|
||||
}
|
||||
synchronized (this) {
|
||||
if (value != null) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
if (delayedBytes != null) {
|
||||
// The extensionRegistry shouldn't be null here since we have delayedBytes.
|
||||
MessageLite parsedValue = defaultInstance.getParserForType()
|
||||
.parseFrom(delayedBytes, extensionRegistry);
|
||||
this.value = parsedValue;
|
||||
this.memoizedBytes = delayedBytes;
|
||||
} else {
|
||||
this.value = defaultInstance;
|
||||
this.memoizedBytes = ByteString.EMPTY;
|
||||
}
|
||||
} catch (InvalidProtocolBufferException e) {
|
||||
// Nothing is logged and no exceptions are thrown. Clients will be unaware that this proto
|
||||
// was invalid.
|
||||
this.value = defaultInstance;
|
||||
this.memoizedBytes = ByteString.EMPTY;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private static void checkArguments(ExtensionRegistryLite extensionRegistry, ByteString bytes) {
|
||||
if (extensionRegistry == null) {
|
||||
throw new NullPointerException("found null ExtensionRegistry");
|
||||
}
|
||||
if (bytes == null) {
|
||||
throw new NullPointerException("found null ByteString");
|
||||
}
|
||||
}
|
||||
}
|
|
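A sketch of the state transitions the comments above describe (hypothetical message type and bytes): getSerializedSize() can be answered from delayedBytes alone, the first getValue() parses and memoizes, and toByteString() is then served from memoizedBytes.

// Sketch of the delayedBytes -> value/memoizedBytes lifecycle.
LazyFieldLite field = new LazyFieldLite(
    ExtensionRegistryLite.getEmptyRegistry(), serializedBytes);
int size = field.getSerializedSize();    // served from delayedBytes, no parse yet
MessageLite value =
    field.getValue(SomeMessage.getDefaultInstance());  // parses once, memoizes bytes
ByteString bytes = field.toByteString(); // served from memoizedBytes, no re-serialization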
@@ -1,423 +0,0 @@
|
|||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package org.apache.hadoop.hbase.shaded.com.google.protobuf;
|
||||
|
||||
import java.util.AbstractList;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.RandomAccess;
|
||||
|
||||
/**
|
||||
* An implementation of {@link LazyStringList} that wraps an ArrayList. Each
|
||||
* element is one of String, ByteString, or byte[]. It caches the last one
|
||||
* requested, which is most likely the one needed next. This minimizes memory
|
||||
* usage while satisfying the most common use cases.
|
||||
* <p>
|
||||
* <strong>Note that this implementation is not synchronized.</strong>
|
||||
* If multiple threads access an <tt>ArrayList</tt> instance concurrently,
|
||||
* and at least one of the threads modifies the list structurally, it
|
||||
* <i>must</i> be synchronized externally. (A structural modification is
|
||||
* any operation that adds or deletes one or more elements, or explicitly
|
||||
* resizes the backing array; merely setting the value of an element is not
|
||||
* a structural modification.) This is typically accomplished by
|
||||
* synchronizing on some object that naturally encapsulates the list.
|
||||
* <p>
|
||||
* If the implementation is accessed via concurrent reads, this is thread safe.
|
||||
* Conversions are done in a thread safe manner. It's possible that the
|
||||
* conversion may happen more than once if two threads attempt to access the
|
||||
* same element and the modifications were not visible to each other, but this
|
||||
* will not result in any corruption of the list or change in behavior other
|
||||
* than performance.
|
||||
*
|
||||
* @author jonp@google.com (Jon Perlow)
|
||||
*/
|
||||
public class LazyStringArrayList extends AbstractProtobufList<String>
|
||||
implements LazyStringList, RandomAccess {
|
||||
|
||||
private static final LazyStringArrayList EMPTY_LIST = new LazyStringArrayList();
|
||||
static {
|
||||
EMPTY_LIST.makeImmutable();
|
||||
}
|
||||
|
||||
static LazyStringArrayList emptyList() {
|
||||
return EMPTY_LIST;
|
||||
}
|
||||
|
||||
// For compatibility with older runtimes.
|
||||
public static final LazyStringList EMPTY = EMPTY_LIST;
|
||||
|
||||
private final List<Object> list;
|
||||
|
||||
public LazyStringArrayList() {
|
||||
this(DEFAULT_CAPACITY);
|
||||
}
|
||||
|
||||
public LazyStringArrayList(int initialCapacity) {
|
||||
this(new ArrayList<Object>(initialCapacity));
|
||||
}
|
||||
|
||||
public LazyStringArrayList(LazyStringList from) {
|
||||
list = new ArrayList<Object>(from.size());
|
||||
addAll(from);
|
||||
}
|
||||
|
||||
public LazyStringArrayList(List<String> from) {
|
||||
this(new ArrayList<Object>(from));
|
||||
}
|
||||
|
||||
private LazyStringArrayList(ArrayList<Object> list) {
|
||||
this.list = list;
|
||||
}
|
||||
|
||||
@Override
|
||||
public LazyStringArrayList mutableCopyWithCapacity(int capacity) {
|
||||
if (capacity < size()) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
ArrayList<Object> newList = new ArrayList<Object>(capacity);
|
||||
newList.addAll(list);
|
||||
return new LazyStringArrayList(newList);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String get(int index) {
|
||||
Object o = list.get(index);
|
||||
if (o instanceof String) {
|
||||
return (String) o;
|
||||
} else if (o instanceof ByteString) {
|
||||
ByteString bs = (ByteString) o;
|
||||
String s = bs.toStringUtf8();
|
||||
if (bs.isValidUtf8()) {
|
||||
list.set(index, s);
|
||||
}
|
||||
return s;
|
||||
} else {
|
||||
byte[] ba = (byte[]) o;
|
||||
String s = Internal.toStringUtf8(ba);
|
||||
if (Internal.isValidUtf8(ba)) {
|
||||
list.set(index, s);
|
||||
}
|
||||
return s;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return list.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String set(int index, String s) {
|
||||
ensureIsMutable();
|
||||
Object o = list.set(index, s);
|
||||
return asString(o);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void add(int index, String element) {
|
||||
ensureIsMutable();
|
||||
list.add(index, element);
|
||||
modCount++;
|
||||
}
|
||||
|
||||
private void add(int index, ByteString element) {
|
||||
ensureIsMutable();
|
||||
list.add(index, element);
|
||||
modCount++;
|
||||
}
|
||||
|
||||
private void add(int index, byte[] element) {
|
||||
ensureIsMutable();
|
||||
list.add(index, element);
|
||||
modCount++;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean addAll(Collection<? extends String> c) {
|
||||
// The default implementation of AbstractCollection.addAll(Collection)
|
||||
// delegates to add(Object). This implementation instead delegates to
|
||||
// addAll(int, Collection), which makes a special case for Collections
|
||||
// which are instances of LazyStringList.
|
||||
return addAll(size(), c);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean addAll(int index, Collection<? extends String> c) {
|
||||
ensureIsMutable();
|
||||
// When copying from another LazyStringList, directly copy the underlying
|
||||
// elements rather than forcing each element to be decoded to a String.
|
||||
Collection<?> collection = c instanceof LazyStringList
|
||||
? ((LazyStringList) c).getUnderlyingElements() : c;
|
||||
boolean ret = list.addAll(index, collection);
|
||||
modCount++;
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean addAllByteString(Collection<? extends ByteString> values) {
|
||||
ensureIsMutable();
|
||||
boolean ret = list.addAll(values);
|
||||
modCount++;
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean addAllByteArray(Collection<byte[]> c) {
|
||||
ensureIsMutable();
|
||||
boolean ret = list.addAll(c);
|
||||
modCount++;
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String remove(int index) {
|
||||
ensureIsMutable();
|
||||
Object o = list.remove(index);
|
||||
modCount++;
|
||||
return asString(o);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
ensureIsMutable();
|
||||
list.clear();
|
||||
modCount++;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void add(ByteString element) {
|
||||
ensureIsMutable();
|
||||
list.add(element);
|
||||
modCount++;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void add(byte[] element) {
|
||||
ensureIsMutable();
|
||||
list.add(element);
|
||||
modCount++;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getRaw(int index) {
|
||||
return list.get(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteString getByteString(int index) {
|
||||
Object o = list.get(index);
|
||||
ByteString b = asByteString(o);
|
||||
if (b != o) {
|
||||
list.set(index, b);
|
||||
}
|
||||
return b;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] getByteArray(int index) {
|
||||
Object o = list.get(index);
|
||||
byte[] b = asByteArray(o);
|
||||
if (b != o) {
|
||||
list.set(index, b);
|
||||
}
|
||||
return b;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void set(int index, ByteString s) {
|
||||
setAndReturn(index, s);
|
||||
}
|
||||
|
||||
private Object setAndReturn(int index, ByteString s) {
|
||||
ensureIsMutable();
|
||||
return list.set(index, s);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void set(int index, byte[] s) {
|
||||
setAndReturn(index, s);
|
||||
}
|
||||
|
||||
private Object setAndReturn(int index, byte[] s) {
|
||||
ensureIsMutable();
|
||||
return list.set(index, s);
|
||||
}
|
||||
|
||||
private static String asString(Object o) {
|
||||
if (o instanceof String) {
|
||||
return (String) o;
|
||||
} else if (o instanceof ByteString) {
|
||||
return ((ByteString) o).toStringUtf8();
|
||||
} else {
|
||||
return Internal.toStringUtf8((byte[]) o);
|
||||
}
|
||||
}
|
||||
|
||||
private static ByteString asByteString(Object o) {
|
||||
if (o instanceof ByteString) {
|
||||
return (ByteString) o;
|
||||
} else if (o instanceof String) {
|
||||
return ByteString.copyFromUtf8((String) o);
|
||||
} else {
|
||||
return ByteString.copyFrom((byte[]) o);
|
||||
}
|
||||
}
|
||||
|
||||
private static byte[] asByteArray(Object o) {
|
||||
if (o instanceof byte[]) {
|
||||
return (byte[]) o;
|
||||
} else if (o instanceof String) {
|
||||
return Internal.toByteArray((String) o);
|
||||
} else {
|
||||
return ((ByteString) o).toByteArray();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<?> getUnderlyingElements() {
|
||||
return Collections.unmodifiableList(list);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void mergeFrom(LazyStringList other) {
|
||||
ensureIsMutable();
|
||||
for (Object o : other.getUnderlyingElements()) {
|
||||
if (o instanceof byte[]) {
|
||||
byte[] b = (byte[]) o;
|
||||
// A byte array's contents are mutable, so they should be copied rather than
|
||||
// shared when merging from one message to another.
|
||||
list.add(Arrays.copyOf(b, b.length));
|
||||
} else {
|
||||
list.add(o);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static class ByteArrayListView extends AbstractList<byte[]>
|
||||
implements RandomAccess {
|
||||
private final LazyStringArrayList list;
|
||||
|
||||
ByteArrayListView(LazyStringArrayList list) {
|
||||
this.list = list;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] get(int index) {
|
||||
return list.getByteArray(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return list.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] set(int index, byte[] s) {
|
||||
Object o = list.setAndReturn(index, s);
|
||||
modCount++;
|
||||
return asByteArray(o);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void add(int index, byte[] s) {
|
||||
list.add(index, s);
|
||||
modCount++;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] remove(int index) {
|
||||
Object o = list.remove(index);
|
||||
modCount++;
|
||||
return asByteArray(o);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<byte[]> asByteArrayList() {
|
||||
return new ByteArrayListView(this);
|
||||
}
|
||||
|
||||
private static class ByteStringListView extends AbstractList<ByteString>
|
||||
implements RandomAccess {
|
||||
private final LazyStringArrayList list;
|
||||
|
||||
ByteStringListView(LazyStringArrayList list) {
|
||||
this.list = list;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteString get(int index) {
|
||||
return list.getByteString(index);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return list.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteString set(int index, ByteString s) {
|
||||
Object o = list.setAndReturn(index, s);
|
||||
modCount++;
|
||||
return asByteString(o);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void add(int index, ByteString s) {
|
||||
list.add(index, s);
|
||||
modCount++;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ByteString remove(int index) {
|
||||
Object o = list.remove(index);
|
||||
modCount++;
|
||||
return asByteString(o);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ByteString> asByteStringList() {
|
||||
return new ByteStringListView(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public LazyStringList getUnmodifiableView() {
|
||||
if (isModifiable()) {
|
||||
return new UnmodifiableLazyStringList(this);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
}
|
|
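A sketch of the caching behavior described in the class comment above: elements go in as ByteString (or byte[]) and are decoded to String, and cached in place, only on first access.

// Sketch: lazy UTF-8 decoding with in-place caching.
LazyStringArrayList list = new LazyStringArrayList();
list.add(ByteString.copyFromUtf8("row-1"));  // stored as-is, not decoded yet
String s = list.get(0);                // decodes to String and caches it in place
ByteString b = list.getByteString(0);  // converts back and caches the ByteString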
@@ -1,174 +0,0 @@
|
|||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package org.apache.hadoop.hbase.shaded.com.google.protobuf;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* An interface extending {@code List<String>} that also provides access to the
|
||||
* items of the list as UTF-8-encoded ByteString or byte[] objects. The
|
||||
* protocol buffer implementation uses this to defer converting bytes parsed
|
||||
* over the wire into String objects until they are needed; it also makes
|
||||
* serialization more efficient when the String was never requested, since
|
||||
* the ByteString or byte[] is already cached. The ByteString methods are
|
||||
* used only in the immutable API, and the byte[] methods only in the mutable
|
||||
* API, because the two use different representations for string/bytes fields.
|
||||
*
|
||||
* @author jonp@google.com (Jon Perlow)
|
||||
*/
|
||||
public interface LazyStringList extends ProtocolStringList {
|
||||
|
||||
/**
|
||||
* Returns the element at the specified position in this list as a ByteString.
|
||||
*
|
||||
* @param index index of the element to return
|
||||
* @return the element at the specified position in this list
|
||||
* @throws IndexOutOfBoundsException if the index is out of range
|
||||
* ({@code index < 0 || index >= size()})
|
||||
*/
|
||||
ByteString getByteString(int index);
|
||||
|
||||
/**
|
||||
* Returns the element at the specified position in this list as an Object
|
||||
* that will either be a String or a ByteString.
|
||||
*
|
||||
* @param index index of the element to return
|
||||
* @return the element at the specified position in this list
|
||||
* @throws IndexOutOfBoundsException if the index is out of range
|
||||
* ({@code index < 0 || index >= size()})
|
||||
*/
|
||||
Object getRaw(int index);
|
||||
|
||||
/**
|
||||
* Returns the element at the specified position in this list as byte[].
|
||||
*
|
||||
* @param index index of the element to return
|
||||
* @return the element at the specified position in this list
|
||||
* @throws IndexOutOfBoundsException if the index is out of range
|
||||
* ({@code index < 0 || index >= size()})
|
||||
*/
|
||||
byte[] getByteArray(int index);
|
||||
|
||||
/**
|
||||
* Appends the specified element to the end of this list (optional
|
||||
* operation).
|
||||
*
|
||||
* @param element element to be appended to this list
|
||||
* @throws UnsupportedOperationException if the <tt>add</tt> operation
|
||||
* is not supported by this list
|
||||
*/
|
||||
void add(ByteString element);
|
||||
|
||||
/**
|
||||
* Appends the specified element to the end of this list (optional
|
||||
* operation).
|
||||
*
|
||||
* @param element element to be appended to this list
|
||||
* @throws UnsupportedOperationException if the <tt>add</tt> operation
|
||||
* is not supported by this list
|
||||
*/
|
||||
void add(byte[] element);
|
||||
|
||||
/**
|
||||
* Replaces the element at the specified position in this list with the
|
||||
* specified element (optional operation).
|
||||
*
|
||||
* @param index index of the element to replace
|
||||
* @param element the element to be stored at the specified position
|
||||
* @throws UnsupportedOperationException if the <tt>set</tt> operation
|
||||
* is not supported by this list
|
||||
* @throws IndexOutOfBoundsException if the index is out of range
|
||||
* ({@code index < 0 || index >= size()})
|
||||
*/
|
||||
void set(int index, ByteString element);
|
||||
|
||||
/**
|
||||
* Replaces the element at the specified position in this list with the
|
||||
* specified element (optional operation).
|
||||
*
|
||||
* @param index index of the element to replace
|
||||
* @param element the element to be stored at the specified position
|
||||
* @throws UnsupportedOperationException if the <tt>set</tt> operation
|
||||
* is not supported by this list
|
||||
* @throws IndexOutOfBoundsException if the index is out of range
|
||||
* ({@code index < 0 || index >= size()})
|
||||
*/
|
||||
void set(int index, byte[] element);
|
||||
|
||||
/**
|
||||
* Appends all elements in the specified ByteString collection to the end of
|
||||
* this list.
|
||||
*
|
||||
* @param c collection whose elements are to be added to this list
|
||||
* @return true if this list changed as a result of the call
|
||||
* @throws UnsupportedOperationException if the <tt>addAllByteString</tt>
|
||||
* operation is not supported by this list
|
||||
*/
|
||||
boolean addAllByteString(Collection<? extends ByteString> c);
|
||||
|
||||
/**
|
||||
* Appends all elements in the specified byte[] collection to the end of
|
||||
* this list.
|
||||
*
|
||||
* @param c collection whose elements are to be added to this list
|
||||
* @return true if this list changed as a result of the call
|
||||
* @throws UnsupportedOperationException if the <tt>addAllByteArray</tt>
|
||||
* operation is not supported by this list
|
||||
*/
|
||||
boolean addAllByteArray(Collection<byte[]> c);
|
||||
|
||||
/**
|
||||
* Returns an unmodifiable List of the underlying elements, each of which is
|
||||
* either a {@code String} or its equivalent UTF-8 encoded {@code ByteString}
|
||||
* or byte[]. It is an error for the caller to modify the returned
|
||||
* List, and attempting to do so will result in an
|
||||
* {@link UnsupportedOperationException}.
|
||||
*/
|
||||
List<?> getUnderlyingElements();
|
||||
|
||||
/**
|
||||
* Merges all elements from another LazyStringList into this one. This method
|
||||
* differs from {@link #addAll(Collection)} in that underlying byte arrays are
|
||||
* copied instead of shared by reference. The immutable API doesn't need to use
|
||||
* this method, as byte[] is not used there at all.
|
||||
*/
|
||||
void mergeFrom(LazyStringList other);
|
||||
|
||||
/**
|
||||
* Returns a mutable view of this list. Changes to the view will be reflected
|
||||
* in the original list. This method is used in the mutable API only.
|
||||
*/
|
||||
List<byte[]> asByteArrayList();
|
||||
|
||||
/** Returns an unmodifiable view of the list. */
|
||||
LazyStringList getUnmodifiableView();
|
||||
}
|
|
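The view methods at the end of the interface expose the same underlying storage under different element types. A small sketch, using LazyStringArrayList (the implementation above) as the concrete type:

// Sketch: asByteStringList() is a view over the same storage, not a copy.
LazyStringArrayList strings = new LazyStringArrayList();
strings.add("cf");
List<ByteString> view = strings.asByteStringList();
ByteString cf = view.get(0);       // converts "cf" once and caches it in the list
LazyStringList frozen = strings.getUnmodifiableView();  // read-only wrapper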
@@ -1,814 +0,0 @@
|
|||
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: google/protobuf/struct.proto
|
||||
|
||||
package org.apache.hadoop.hbase.shaded.com.google.protobuf;
|
||||
|
||||
/**
|
||||
* <pre>
|
||||
* `ListValue` is a wrapper around a repeated field of values.
|
||||
* The JSON representation for `ListValue` is a JSON array.
|
||||
* </pre>
|
||||
*
|
||||
* Protobuf type {@code google.protobuf.ListValue}
|
||||
*/
|
||||
public final class ListValue extends
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
|
||||
// @@protoc_insertion_point(message_implements:google.protobuf.ListValue)
|
||||
ListValueOrBuilder {
|
||||
// Use ListValue.newBuilder() to construct.
|
||||
private ListValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
|
||||
super(builder);
|
||||
}
|
||||
private ListValue() {
|
||||
values_ = java.util.Collections.emptyList();
|
||||
}
|
||||
|
||||
@java.lang.Override
|
||||
public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
|
||||
getUnknownFields() {
|
||||
return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
|
||||
}
|
||||
private ListValue(
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
|
||||
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
|
||||
this();
|
||||
int mutable_bitField0_ = 0;
|
||||
try {
|
||||
boolean done = false;
|
||||
while (!done) {
|
||||
int tag = input.readTag();
|
||||
switch (tag) {
|
||||
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
              values_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.Value>();
              mutable_bitField0_ |= 0x00000001;
            }
            values_.add(
                input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.parser(), extensionRegistry));
            break;
          }
        }
      }
    } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
        values_ = java.util.Collections.unmodifiableList(values_);
      }
      makeExtensionsImmutable();
    }
  }
  public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.StructProto.internal_static_google_protobuf_ListValue_descriptor;
  }

  protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.StructProto.internal_static_google_protobuf_ListValue_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue.Builder.class);
  }

  public static final int VALUES_FIELD_NUMBER = 1;
  private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> values_;
  /**
   * <pre>
   * Repeated field of dynamically typed values.
   * </pre>
   *
   * <code>repeated .google.protobuf.Value values = 1;</code>
   */
  public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> getValuesList() {
    return values_;
  }
  /**
   * <pre>
   * Repeated field of dynamically typed values.
   * </pre>
   *
   * <code>repeated .google.protobuf.Value values = 1;</code>
   */
  public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.ValueOrBuilder>
      getValuesOrBuilderList() {
    return values_;
  }
  /**
   * <pre>
   * Repeated field of dynamically typed values.
   * </pre>
   *
   * <code>repeated .google.protobuf.Value values = 1;</code>
   */
  public int getValuesCount() {
    return values_.size();
  }
  /**
   * <pre>
   * Repeated field of dynamically typed values.
   * </pre>
   *
   * <code>repeated .google.protobuf.Value values = 1;</code>
   */
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Value getValues(int index) {
    return values_.get(index);
  }
  /**
   * <pre>
   * Repeated field of dynamically typed values.
   * </pre>
   *
   * <code>repeated .google.protobuf.Value values = 1;</code>
   */
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.ValueOrBuilder getValuesOrBuilder(
      int index) {
    return values_.get(index);
  }

  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    for (int i = 0; i < values_.size(); i++) {
      output.writeMessage(1, values_.get(i));
    }
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < values_.size(); i++) {
      size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, values_.get(i));
    }
    memoizedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue)) {
      return super.equals(obj);
    }
    org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue) obj;

    boolean result = true;
    result = result && getValuesList()
        .equals(other.getValuesList());
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getValuesCount() > 0) {
      hash = (37 * hash) + VALUES_FIELD_NUMBER;
      hash = (53 * hash) + getValuesList().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parseFrom(byte[] data)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parseFrom(
      byte[] data,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parseFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parseDelimitedFrom(
      java.io.InputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parseFrom(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * `ListValue` is a wrapper around a repeated field of values.
   * The JSON representation for `ListValue` is JSON array.
   * </pre>
   *
   * Protobuf type {@code google.protobuf.ListValue}
   */
  public static final class Builder extends
      org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.protobuf.ListValue)
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValueOrBuilder {
    public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.StructProto.internal_static_google_protobuf_ListValue_descriptor;
    }

    protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.StructProto.internal_static_google_protobuf_ListValue_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue.Builder.class);
    }

    // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
          .alwaysUseFieldBuilders) {
        getValuesFieldBuilder();
      }
    }
    public Builder clear() {
      super.clear();
      if (valuesBuilder_ == null) {
        values_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        valuesBuilder_.clear();
      }
      return this;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.StructProto.internal_static_google_protobuf_ListValue_descriptor;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue getDefaultInstanceForType() {
      return org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue.getDefaultInstance();
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue build() {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue buildPartial() {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue(this);
      int from_bitField0_ = bitField0_;
      if (valuesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          values_ = java.util.Collections.unmodifiableList(values_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.values_ = values_;
      } else {
        result.values_ = valuesBuilder_.build();
      }
      onBuilt();
      return result;
    }

    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue) {
        return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue other) {
      if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue.getDefaultInstance()) return this;
      if (valuesBuilder_ == null) {
        if (!other.values_.isEmpty()) {
          if (values_.isEmpty()) {
            values_ = other.values_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureValuesIsMutable();
            values_.addAll(other.values_);
          }
          onChanged();
        }
      } else {
        if (!other.values_.isEmpty()) {
          if (valuesBuilder_.isEmpty()) {
            valuesBuilder_.dispose();
            valuesBuilder_ = null;
            values_ = other.values_;
            bitField0_ = (bitField0_ & ~0x00000001);
            valuesBuilder_ =
                org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                    getValuesFieldBuilder() : null;
          } else {
            valuesBuilder_.addAllMessages(other.values_);
          }
        }
      }
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    private java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> values_ =
        java.util.Collections.emptyList();
    private void ensureValuesIsMutable() {
      if (!((bitField0_ & 0x00000001) == 0x00000001)) {
        values_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.com.google.protobuf.Value>(values_);
        bitField0_ |= 0x00000001;
      }
    }

    private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Value, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.ValueOrBuilder> valuesBuilder_;

    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> getValuesList() {
      if (valuesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(values_);
      } else {
        return valuesBuilder_.getMessageList();
      }
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public int getValuesCount() {
      if (valuesBuilder_ == null) {
        return values_.size();
      } else {
        return valuesBuilder_.getCount();
      }
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Value getValues(int index) {
      if (valuesBuilder_ == null) {
        return values_.get(index);
      } else {
        return valuesBuilder_.getMessage(index);
      }
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public Builder setValues(
        int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value value) {
      if (valuesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureValuesIsMutable();
        values_.set(index, value);
        onChanged();
      } else {
        valuesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public Builder setValues(
        int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.Builder builderForValue) {
      if (valuesBuilder_ == null) {
        ensureValuesIsMutable();
        values_.set(index, builderForValue.build());
        onChanged();
      } else {
        valuesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public Builder addValues(org.apache.hadoop.hbase.shaded.com.google.protobuf.Value value) {
      if (valuesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureValuesIsMutable();
        values_.add(value);
        onChanged();
      } else {
        valuesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public Builder addValues(
        int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value value) {
      if (valuesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureValuesIsMutable();
        values_.add(index, value);
        onChanged();
      } else {
        valuesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public Builder addValues(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.Builder builderForValue) {
      if (valuesBuilder_ == null) {
        ensureValuesIsMutable();
        values_.add(builderForValue.build());
        onChanged();
      } else {
        valuesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public Builder addValues(
        int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.Builder builderForValue) {
      if (valuesBuilder_ == null) {
        ensureValuesIsMutable();
        values_.add(index, builderForValue.build());
        onChanged();
      } else {
        valuesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public Builder addAllValues(
        java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.Value> values) {
      if (valuesBuilder_ == null) {
        ensureValuesIsMutable();
        org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, values_);
        onChanged();
      } else {
        valuesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public Builder clearValues() {
      if (valuesBuilder_ == null) {
        values_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        valuesBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public Builder removeValues(int index) {
      if (valuesBuilder_ == null) {
        ensureValuesIsMutable();
        values_.remove(index);
        onChanged();
      } else {
        valuesBuilder_.remove(index);
      }
      return this;
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.Builder getValuesBuilder(
        int index) {
      return getValuesFieldBuilder().getBuilder(index);
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.ValueOrBuilder getValuesOrBuilder(
        int index) {
      if (valuesBuilder_ == null) {
        return values_.get(index);
      } else {
        return valuesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.ValueOrBuilder>
        getValuesOrBuilderList() {
      if (valuesBuilder_ != null) {
        return valuesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(values_);
      }
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.Builder addValuesBuilder() {
      return getValuesFieldBuilder().addBuilder(
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.getDefaultInstance());
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.Builder addValuesBuilder(
        int index) {
      return getValuesFieldBuilder().addBuilder(
          index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.getDefaultInstance());
    }
    /**
     * <pre>
     * Repeated field of dynamically typed values.
     * </pre>
     *
     * <code>repeated .google.protobuf.Value values = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.Builder>
        getValuesBuilderList() {
      return getValuesFieldBuilder().getBuilderList();
    }
    private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
        org.apache.hadoop.hbase.shaded.com.google.protobuf.Value, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.ValueOrBuilder>
        getValuesFieldBuilder() {
      if (valuesBuilder_ == null) {
        valuesBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3<
            org.apache.hadoop.hbase.shaded.com.google.protobuf.Value, org.apache.hadoop.hbase.shaded.com.google.protobuf.Value.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.ValueOrBuilder>(
                values_,
                ((bitField0_ & 0x00000001) == 0x00000001),
                getParentForChildren(),
                isClean());
        values_ = null;
      }
      return valuesBuilder_;
    }
    public final Builder setUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    public final Builder mergeUnknownFields(
        final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }


    // @@protoc_insertion_point(builder_scope:google.protobuf.ListValue)
  }

  // @@protoc_insertion_point(class_scope:google.protobuf.ListValue)
  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue();
  }

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ListValue>
      PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ListValue>() {
    public ListValue parsePartialFrom(
        org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
        org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
      return new ListValue(input, extensionRegistry);
    }
  };

  public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ListValue> parser() {
    return PARSER;
  }

  @java.lang.Override
  public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ListValue> getParserForType() {
    return PARSER;
  }

  public org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
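(Editor's aside, not part of the diff: a minimal sketch of how client code would use the generated Builder/PARSER pair shown above. The shaded class names match the deleted file; the ListValueSketch scaffolding and the setNumberValue/setStringValue calls on Value are assumptions based on struct.proto's generated API, not taken from this commit.)

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Value;

    public class ListValueSketch {
      public static void main(String[] args) throws Exception {
        // The Builder accumulates values_ in a mutable list; build() freezes it
        // into the unmodifiable list the parsing constructor also produces.
        ListValue list = ListValue.newBuilder()
            .addValues(Value.newBuilder().setNumberValue(1.0).build())
            .addValues(Value.newBuilder().setStringValue("two").build())
            .build();
        // Round-trip through the generated PARSER via the static parseFrom overloads.
        byte[] wire = list.toByteArray();
        ListValue copy = ListValue.parseFrom(wire);
        System.out.println(copy.getValuesCount()); // expected: 2
      }
    }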
@@ -1,53 +0,0 @@
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/protobuf/struct.proto

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

public interface ListValueOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.protobuf.ListValue)
    org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * Repeated field of dynamically typed values.
   * </pre>
   *
   * <code>repeated .google.protobuf.Value values = 1;</code>
   */
  java.util.List<org.apache.hadoop.hbase.shaded.com.google.protobuf.Value>
      getValuesList();
  /**
   * <pre>
   * Repeated field of dynamically typed values.
   * </pre>
   *
   * <code>repeated .google.protobuf.Value values = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.Value getValues(int index);
  /**
   * <pre>
   * Repeated field of dynamically typed values.
   * </pre>
   *
   * <code>repeated .google.protobuf.Value values = 1;</code>
   */
  int getValuesCount();
  /**
   * <pre>
   * Repeated field of dynamically typed values.
   * </pre>
   *
   * <code>repeated .google.protobuf.Value values = 1;</code>
   */
  java.util.List<? extends org.apache.hadoop.hbase.shaded.com.google.protobuf.ValueOrBuilder>
      getValuesOrBuilderList();
  /**
   * <pre>
   * Repeated field of dynamically typed values.
   * </pre>
   *
   * <code>repeated .google.protobuf.Value values = 1;</code>
   */
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ValueOrBuilder getValuesOrBuilder(
      int index);
}
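(Editor's aside, illustrative only: the point of an *OrBuilder interface like the one above is that read-only code can accept either a built message or an in-progress Builder, since both implement it. A hypothetical helper, not from this commit:)

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValueOrBuilder;

    public final class ListValueInspect {
      // Accepts ListValue and ListValue.Builder alike; both implement
      // ListValueOrBuilder, so no build() is forced just to read the count.
      static int countValues(ListValueOrBuilder list) {
        return list.getValuesCount();
      }
      private ListValueInspect() {}
    }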
@@ -1,272 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.LongList;

import java.util.Arrays;
import java.util.Collection;
import java.util.RandomAccess;

/**
 * An implementation of {@link LongList} on top of a primitive array.
 *
 * @author dweis@google.com (Daniel Weis)
 */
final class LongArrayList
    extends AbstractProtobufList<Long>
    implements LongList, RandomAccess {

  private static final LongArrayList EMPTY_LIST = new LongArrayList();
  static {
    EMPTY_LIST.makeImmutable();
  }

  public static LongArrayList emptyList() {
    return EMPTY_LIST;
  }

  /**
   * The backing store for the list.
   */
  private long[] array;

  /**
   * The size of the list distinct from the length of the array. That is, it is the number of
   * elements set in the list.
   */
  private int size;

  /**
   * Constructs a new mutable {@code LongArrayList} with default capacity.
   */
  LongArrayList() {
    this(new long[DEFAULT_CAPACITY], 0);
  }

  /**
   * Constructs a new mutable {@code LongArrayList}
   * containing the same elements as {@code other}.
   */
  private LongArrayList(long[] other, int size) {
    array = other;
    this.size = size;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof LongArrayList)) {
      return super.equals(o);
    }
    LongArrayList other = (LongArrayList) o;
    if (size != other.size) {
      return false;
    }

    final long[] arr = other.array;
    for (int i = 0; i < size; i++) {
      if (array[i] != arr[i]) {
        return false;
      }
    }

    return true;
  }

  @Override
  public int hashCode() {
    int result = 1;
    for (int i = 0; i < size; i++) {
      result = (31 * result) + Internal.hashLong(array[i]);
    }
    return result;
  }

  @Override
  public LongList mutableCopyWithCapacity(int capacity) {
    if (capacity < size) {
      throw new IllegalArgumentException();
    }
    return new LongArrayList(Arrays.copyOf(array, capacity), size);
  }

  @Override
  public Long get(int index) {
    return getLong(index);
  }

  @Override
  public long getLong(int index) {
    ensureIndexInRange(index);
    return array[index];
  }

  @Override
  public int size() {
    return size;
  }

  @Override
  public Long set(int index, Long element) {
    return setLong(index, element);
  }

  @Override
  public long setLong(int index, long element) {
    ensureIsMutable();
    ensureIndexInRange(index);
    long previousValue = array[index];
    array[index] = element;
    return previousValue;
  }

  @Override
  public void add(int index, Long element) {
    addLong(index, element);
  }

  /**
   * Like {@link #add(Long)} but more efficient in that it doesn't box the element.
   */
  @Override
  public void addLong(long element) {
    addLong(size, element);
  }

  /**
   * Like {@link #add(int, Long)} but more efficient in that it doesn't box the element.
   */
  private void addLong(int index, long element) {
    ensureIsMutable();
    if (index < 0 || index > size) {
      throw new IndexOutOfBoundsException(makeOutOfBoundsExceptionMessage(index));
    }

    if (size < array.length) {
      // Shift everything over to make room
      System.arraycopy(array, index, array, index + 1, size - index);
    } else {
      // Resize to 1.5x the size
      int length = ((size * 3) / 2) + 1;
      long[] newArray = new long[length];

      // Copy the first part directly
      System.arraycopy(array, 0, newArray, 0, index);

      // Copy the rest shifted over by one to make room
      System.arraycopy(array, index, newArray, index + 1, size - index);
      array = newArray;
    }

    array[index] = element;
    size++;
    modCount++;
  }

  @Override
  public boolean addAll(Collection<? extends Long> collection) {
    ensureIsMutable();

    if (collection == null) {
      throw new NullPointerException();
    }

    // We specialize when adding another LongArrayList to avoid boxing elements.
    if (!(collection instanceof LongArrayList)) {
      return super.addAll(collection);
    }

    LongArrayList list = (LongArrayList) collection;
    if (list.size == 0) {
      return false;
    }

    int overflow = Integer.MAX_VALUE - size;
    if (overflow < list.size) {
      // We can't actually represent a list this large.
      throw new OutOfMemoryError();
    }

    int newSize = size + list.size;
    if (newSize > array.length) {
      array = Arrays.copyOf(array, newSize);
    }

    System.arraycopy(list.array, 0, array, size, list.size);
    size = newSize;
    modCount++;
    return true;
  }

  @Override
  public boolean remove(Object o) {
    ensureIsMutable();
    for (int i = 0; i < size; i++) {
      if (o.equals(array[i])) {
        System.arraycopy(array, i + 1, array, i, size - i);
        size--;
        modCount++;
        return true;
      }
    }
    return false;
  }

  @Override
  public Long remove(int index) {
    ensureIsMutable();
    ensureIndexInRange(index);
    long value = array[index];
    System.arraycopy(array, index + 1, array, index, size - index);
    size--;
    modCount++;
    return value;
  }

  /**
   * Ensures that the provided {@code index} is within the range of {@code [0, size]}. Throws an
   * {@link IndexOutOfBoundsException} if it is not.
   *
   * @param index the index to verify is in range
   */
  private void ensureIndexInRange(int index) {
    if (index < 0 || index >= size) {
      throw new IndexOutOfBoundsException(makeOutOfBoundsExceptionMessage(index));
    }
  }

  private String makeOutOfBoundsExceptionMessage(int index) {
    return "Index:" + index + ", Size:" + size;
  }
}
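(Editor's aside on the growth policy in addLong above: when the backing array is full, it is resized to roughly 1.5x its current size, the same policy ArrayList uses, which amortizes the copy cost across inserts. A standalone sketch of the same arithmetic in plain Java, with no protobuf types involved:)

    import java.util.Arrays;

    public class GrowthSketch {
      public static void main(String[] args) {
        long[] array = new long[10];
        int size = array.length;            // pretend the list is exactly full
        // Same formula as LongArrayList.addLong: ((size * 3) / 2) + 1.
        int length = ((size * 3) / 2) + 1;  // 10 -> 16, 16 -> 25, 25 -> 38, ...
        array = Arrays.copyOf(array, length);
        System.out.println(array.length);   // prints 16
      }
    }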
@@ -1,449 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor;

import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.TreeMap;

/**
 * Implements MapEntry messages.
 *
 * In reflection API, map fields will be treated as repeated message fields and
 * each map entry is accessed as a message. This MapEntry class is used to
 * represent these map entry messages in reflection API.
 *
 * Protobuf internal. Users shouldn't use this class.
 */
public final class MapEntry<K, V> extends AbstractMessage {

  private static final class Metadata<K, V> extends MapEntryLite.Metadata<K, V> {

    public final Descriptor descriptor;
    public final Parser<MapEntry<K, V>> parser;

    public Metadata(
        Descriptor descriptor,
        MapEntry<K, V> defaultInstance,
        WireFormat.FieldType keyType,
        WireFormat.FieldType valueType) {
      super(keyType, defaultInstance.key, valueType, defaultInstance.value);
      this.descriptor = descriptor;
      this.parser = new AbstractParser<MapEntry<K, V>>() {

        @Override
        public MapEntry<K, V> parsePartialFrom(
            CodedInputStream input, ExtensionRegistryLite extensionRegistry)
            throws InvalidProtocolBufferException {
          return new MapEntry<K, V>(Metadata.this, input, extensionRegistry);
        }
      };
    }
  }

  private final K key;
  private final V value;
  private final Metadata<K, V> metadata;

  /** Create a default MapEntry instance. */
  private MapEntry(
      Descriptor descriptor,
      WireFormat.FieldType keyType, K defaultKey,
      WireFormat.FieldType valueType, V defaultValue) {
    this.key = defaultKey;
    this.value = defaultValue;
    this.metadata = new Metadata<K, V>(descriptor, this, keyType, valueType);
  }

  /** Create a MapEntry with the provided key and value. */
  private MapEntry(Metadata metadata, K key, V value) {
    this.key = key;
    this.value = value;
    this.metadata = metadata;
  }

  /** Parsing constructor. */
  private MapEntry(
      Metadata<K, V> metadata,
      CodedInputStream input,
      ExtensionRegistryLite extensionRegistry)
      throws InvalidProtocolBufferException {
    try {
      this.metadata = metadata;
      Map.Entry<K, V> entry = MapEntryLite.parseEntry(input, metadata, extensionRegistry);
      this.key = entry.getKey();
      this.value = entry.getValue();
    } catch (InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (IOException e) {
      throw new InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    }
  }

  /**
   * Create a default MapEntry instance. A default MapEntry instance should be
   * created only once for each map entry message type. Generated code should
   * store the created default instance and use it later to create new MapEntry
   * messages of the same type.
   */
  public static <K, V> MapEntry<K, V> newDefaultInstance(
      Descriptor descriptor,
      WireFormat.FieldType keyType, K defaultKey,
      WireFormat.FieldType valueType, V defaultValue) {
    return new MapEntry<K, V>(
        descriptor, keyType, defaultKey, valueType, defaultValue);
  }

  public K getKey() {
    return key;
  }

  public V getValue() {
    return value;
  }

  private volatile int cachedSerializedSize = -1;

  @Override
  public int getSerializedSize() {
    if (cachedSerializedSize != -1) {
      return cachedSerializedSize;
    }

    int size = MapEntryLite.computeSerializedSize(metadata, key, value);
    cachedSerializedSize = size;
    return size;
  }

  @Override
  public void writeTo(CodedOutputStream output) throws IOException {
    MapEntryLite.writeTo(output, metadata, key, value);
  }

  @Override
  public boolean isInitialized() {
    return isInitialized(metadata, value);
  }

  @Override
  public Parser<MapEntry<K, V>> getParserForType() {
    return metadata.parser;
  }

  @Override
  public Builder<K, V> newBuilderForType() {
    return new Builder<K, V>(metadata);
  }

  @Override
  public Builder<K, V> toBuilder() {
    return new Builder<K, V>(metadata, key, value);
  }

  @Override
  public MapEntry<K, V> getDefaultInstanceForType() {
    return new MapEntry<K, V>(metadata, metadata.defaultKey, metadata.defaultValue);
  }

  @Override
  public Descriptor getDescriptorForType() {
    return metadata.descriptor;
  }

  @Override
  public Map<FieldDescriptor, Object> getAllFields() {
    TreeMap<FieldDescriptor, Object> result = new TreeMap<FieldDescriptor, Object>();
    for (final FieldDescriptor field : metadata.descriptor.getFields()) {
      if (hasField(field)) {
        result.put(field, getField(field));
      }
    }
    return Collections.unmodifiableMap(result);
  }

  private void checkFieldDescriptor(FieldDescriptor field) {
    if (field.getContainingType() != metadata.descriptor) {
      throw new RuntimeException(
          "Wrong FieldDescriptor \"" + field.getFullName()
          + "\" used in message \"" + metadata.descriptor.getFullName());
    }
  }

  @Override
  public boolean hasField(FieldDescriptor field) {
    checkFieldDescriptor(field);
    // A MapEntry always contains two fields.
    return true;
  }

  @Override
  public Object getField(FieldDescriptor field) {
    checkFieldDescriptor(field);
    Object result = field.getNumber() == 1 ? getKey() : getValue();
    // Convert enums to EnumValueDescriptor.
    if (field.getType() == FieldDescriptor.Type.ENUM) {
      result = field.getEnumType().findValueByNumberCreatingIfUnknown(
          (java.lang.Integer) result);
    }
    return result;
  }

  @Override
  public int getRepeatedFieldCount(FieldDescriptor field) {
    throw new RuntimeException(
        "There is no repeated field in a map entry message.");
  }

  @Override
  public Object getRepeatedField(FieldDescriptor field, int index) {
    throw new RuntimeException(
        "There is no repeated field in a map entry message.");
  }

  @Override
  public UnknownFieldSet getUnknownFields() {
    return UnknownFieldSet.getDefaultInstance();
  }

  /**
   * Builder to create {@link MapEntry} messages.
   */
  public static class Builder<K, V>
      extends AbstractMessage.Builder<Builder<K, V>> {
    private final Metadata<K, V> metadata;
    private K key;
    private V value;

    private Builder(Metadata<K, V> metadata) {
      this(metadata, metadata.defaultKey, metadata.defaultValue);
    }

    private Builder(Metadata<K, V> metadata, K key, V value) {
      this.metadata = metadata;
      this.key = key;
      this.value = value;
    }

    public K getKey() {
      return key;
    }

    public V getValue() {
      return value;
    }

    public Builder<K, V> setKey(K key) {
      this.key = key;
      return this;
    }

    public Builder<K, V> clearKey() {
      this.key = metadata.defaultKey;
      return this;
    }

    public Builder<K, V> setValue(V value) {
      this.value = value;
      return this;
    }

    public Builder<K, V> clearValue() {
      this.value = metadata.defaultValue;
      return this;
    }

    @Override
    public MapEntry<K, V> build() {
      MapEntry<K, V> result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @Override
    public MapEntry<K, V> buildPartial() {
      return new MapEntry<K, V>(metadata, key, value);
    }

    @Override
    public Descriptor getDescriptorForType() {
      return metadata.descriptor;
    }

    private void checkFieldDescriptor(FieldDescriptor field) {
      if (field.getContainingType() != metadata.descriptor) {
        throw new RuntimeException(
            "Wrong FieldDescriptor \"" + field.getFullName()
            + "\" used in message \"" + metadata.descriptor.getFullName());
      }
    }

    @Override
    public Message.Builder newBuilderForField(FieldDescriptor field) {
      checkFieldDescriptor(field);
      // This method should be called for message fields and in a MapEntry
      // message only the value field can possibly be a message field.
      if (field.getNumber() != 2
          || field.getJavaType() != FieldDescriptor.JavaType.MESSAGE) {
        throw new RuntimeException(
            "\"" + field.getFullName() + "\" is not a message value field.");
      }
      return ((Message) value).newBuilderForType();
    }

    @SuppressWarnings("unchecked")
    @Override
    public Builder<K, V> setField(FieldDescriptor field, Object value) {
      checkFieldDescriptor(field);
      if (field.getNumber() == 1) {
        setKey((K) value);
      } else {
        if (field.getType() == FieldDescriptor.Type.ENUM) {
          value = ((EnumValueDescriptor) value).getNumber();
        } else if (field.getType() == FieldDescriptor.Type.MESSAGE) {
          if (value != null && !metadata.defaultValue.getClass().isInstance(value)) {
            // The value is not the exact right message type. However, if it
            // is an alternative implementation of the same type -- e.g. a
            // DynamicMessage -- we should accept it. In this case we can make
            // a copy of the message.
            value =
                ((Message) metadata.defaultValue).toBuilder().mergeFrom((Message) value).build();
          }
        }
        setValue((V) value);
      }
      return this;
    }

    @Override
    public Builder<K, V> clearField(FieldDescriptor field) {
      checkFieldDescriptor(field);
      if (field.getNumber() == 1) {
        clearKey();
      } else {
        clearValue();
      }
      return this;
    }

    @Override
    public Builder<K, V> setRepeatedField(FieldDescriptor field, int index,
        Object value) {
      throw new RuntimeException(
          "There is no repeated field in a map entry message.");
    }

    @Override
    public Builder<K, V> addRepeatedField(FieldDescriptor field, Object value) {
      throw new RuntimeException(
          "There is no repeated field in a map entry message.");
    }

    @Override
    public Builder<K, V> setUnknownFields(UnknownFieldSet unknownFields) {
      // Unknown fields are discarded for MapEntry message.
      return this;
    }

    @Override
    public MapEntry<K, V> getDefaultInstanceForType() {
      return new MapEntry<K, V>(metadata, metadata.defaultKey, metadata.defaultValue);
    }

    @Override
    public boolean isInitialized() {
      return MapEntry.isInitialized(metadata, value);
    }

    @Override
    public Map<FieldDescriptor, Object> getAllFields() {
      final TreeMap<FieldDescriptor, Object> result = new TreeMap<FieldDescriptor, Object>();
      for (final FieldDescriptor field : metadata.descriptor.getFields()) {
        if (hasField(field)) {
          result.put(field, getField(field));
        }
      }
      return Collections.unmodifiableMap(result);
    }

    @Override
    public boolean hasField(FieldDescriptor field) {
      checkFieldDescriptor(field);
      return true;
    }

    @Override
    public Object getField(FieldDescriptor field) {
      checkFieldDescriptor(field);
      Object result = field.getNumber() == 1 ? getKey() : getValue();
      // Convert enums to EnumValueDescriptor.
      if (field.getType() == FieldDescriptor.Type.ENUM) {
        result = field.getEnumType().findValueByNumberCreatingIfUnknown((Integer) result);
      }
      return result;
    }

    @Override
    public int getRepeatedFieldCount(FieldDescriptor field) {
      throw new RuntimeException(
          "There is no repeated field in a map entry message.");
    }

    @Override
    public Object getRepeatedField(FieldDescriptor field, int index) {
      throw new RuntimeException(
          "There is no repeated field in a map entry message.");
    }

    @Override
    public UnknownFieldSet getUnknownFields() {
      return UnknownFieldSet.getDefaultInstance();
    }

    @Override
    public Builder<K, V> clone() {
      return new Builder(metadata, key, value);
    }
  }

  private static <V> boolean isInitialized(Metadata metadata, V value) {
    if (metadata.valueType.getJavaType() == WireFormat.JavaType.MESSAGE) {
      return ((MessageLite) value).isInitialized();
    }
    return true;
  }
}
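(Editor's aside on the class above, illustrative only: every map entry travels on the wire as a length-delimited nested message whose key is field 1 and whose value is field 2, which is why KEY_FIELD_NUMBER and VALUE_FIELD_NUMBER in MapEntryLite below are fixed constants. A hedged sketch of the tag arithmetic, using the standard protobuf packing of (fieldNumber << 3) | wireType; MapTagSketch itself is not from this commit:)

    public class MapTagSketch {
      // Packs a field number and wire type into a tag, as WireFormat.makeTag does.
      static int makeTag(int fieldNumber, int wireType) {
        return (fieldNumber << 3) | wireType;
      }
      public static void main(String[] args) {
        int lengthDelimited = 2; // WIRETYPE_LENGTH_DELIMITED
        System.out.println(makeTag(1, lengthDelimited)); // key tag, e.g. string key: 10
        System.out.println(makeTag(2, lengthDelimited)); // value tag, e.g. message value: 18
      }
    }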
@@ -1,226 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import java.io.IOException;
import java.util.AbstractMap;
import java.util.Map;

/**
 * Implements the lite version of map entry messages.
 *
 * This class serves as an utility class to help do serialization/parsing of
 * map entries. It's used in generated code and also in the full version
 * MapEntry message.
 *
 * Protobuf internal. Users shouldn't use.
 */
public class MapEntryLite<K, V> {

  static class Metadata<K, V> {
    public final WireFormat.FieldType keyType;
    public final K defaultKey;
    public final WireFormat.FieldType valueType;
    public final V defaultValue;

    public Metadata(
        WireFormat.FieldType keyType, K defaultKey,
        WireFormat.FieldType valueType, V defaultValue) {
      this.keyType = keyType;
      this.defaultKey = defaultKey;
      this.valueType = valueType;
      this.defaultValue = defaultValue;
    }
  }

  private static final int KEY_FIELD_NUMBER = 1;
  private static final int VALUE_FIELD_NUMBER = 2;

  private final Metadata<K, V> metadata;
  private final K key;
  private final V value;

  /** Creates a default MapEntryLite message instance. */
  private MapEntryLite(
      WireFormat.FieldType keyType, K defaultKey,
      WireFormat.FieldType valueType, V defaultValue) {
    this.metadata = new Metadata<K, V>(keyType, defaultKey, valueType, defaultValue);
    this.key = defaultKey;
    this.value = defaultValue;
  }

  /** Creates a new MapEntryLite message. */
  private MapEntryLite(Metadata<K, V> metadata, K key, V value) {
    this.metadata = metadata;
    this.key = key;
    this.value = value;
  }

  public K getKey() {
    return key;
  }

  public V getValue() {
    return value;
  }

  /**
   * Creates a default MapEntryLite message instance.
   *
   * This method is used by generated code to create the default instance for
   * a map entry message. The created default instance should be used to create
   * new map entry messages of the same type. For each map entry message, only
   * one default instance should be created.
   */
  public static <K, V> MapEntryLite<K, V> newDefaultInstance(
      WireFormat.FieldType keyType, K defaultKey,
      WireFormat.FieldType valueType, V defaultValue) {
    return new MapEntryLite<K, V>(
        keyType, defaultKey, valueType, defaultValue);
  }

  static <K, V> void writeTo(CodedOutputStream output, Metadata<K, V> metadata, K key, V value)
      throws IOException {
    FieldSet.writeElement(output, metadata.keyType, KEY_FIELD_NUMBER, key);
    FieldSet.writeElement(output, metadata.valueType, VALUE_FIELD_NUMBER, value);
  }

  static <K, V> int computeSerializedSize(Metadata<K, V> metadata, K key, V value) {
    return FieldSet.computeElementSize(metadata.keyType, KEY_FIELD_NUMBER, key)
        + FieldSet.computeElementSize(metadata.valueType, VALUE_FIELD_NUMBER, value);
  }

  @SuppressWarnings("unchecked")
  static <T> T parseField(
      CodedInputStream input, ExtensionRegistryLite extensionRegistry,
      WireFormat.FieldType type, T value) throws IOException {
    switch (type) {
      case MESSAGE:
        MessageLite.Builder subBuilder = ((MessageLite) value).toBuilder();
        input.readMessage(subBuilder, extensionRegistry);
        return (T) subBuilder.buildPartial();
      case ENUM:
        return (T) (java.lang.Integer) input.readEnum();
      case GROUP:
        throw new RuntimeException("Groups are not allowed in maps.");
      default:
        return (T) FieldSet.readPrimitiveField(input, type, true);
    }
  }

  /**
   * Serializes the provided key and value as though they were wrapped by a {@link MapEntryLite}
   * to the output stream. This helper method avoids allocation of a {@link MapEntryLite}
   * built with a key and value and is called from generated code directly.
   */
  public void serializeTo(CodedOutputStream output, int fieldNumber, K key, V value)
      throws IOException {
    output.writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED);
    output.writeUInt32NoTag(computeSerializedSize(metadata, key, value));
    writeTo(output, metadata, key, value);
  }

  /**
   * Computes the message size for the provided key and value as though they were wrapped
   * by a {@link MapEntryLite}. This helper method avoids allocation of a {@link MapEntryLite}
   * built with a key and value and is called from generated code directly.
   */
  public int computeMessageSize(int fieldNumber, K key, V value) {
    return CodedOutputStream.computeTagSize(fieldNumber)
        + CodedOutputStream.computeLengthDelimitedFieldSize(
            computeSerializedSize(metadata, key, value));
  }

  /**
   * Parses an entry off of the input as a {@link Map.Entry}. This helper requires an allocation
   * so using {@link #parseInto} is preferred if possible.
   */
  public Map.Entry<K, V> parseEntry(ByteString bytes, ExtensionRegistryLite extensionRegistry)
      throws IOException {
    return parseEntry(bytes.newCodedInput(), metadata, extensionRegistry);
  }

  static <K, V> Map.Entry<K, V> parseEntry(
      CodedInputStream input, Metadata<K, V> metadata, ExtensionRegistryLite extensionRegistry)
      throws IOException {
    K key = metadata.defaultKey;
    V value = metadata.defaultValue;
    while (true) {
      int tag = input.readTag();
      if (tag == 0) {
        break;
      }
      if (tag == WireFormat.makeTag(KEY_FIELD_NUMBER, metadata.keyType.getWireType())) {
        key = parseField(input, extensionRegistry, metadata.keyType, key);
      } else if (tag == WireFormat.makeTag(VALUE_FIELD_NUMBER, metadata.valueType.getWireType())) {
        value = parseField(input, extensionRegistry, metadata.valueType, value);
      } else {
        if (!input.skipField(tag)) {
          break;
        }
      }
    }
    return new AbstractMap.SimpleImmutableEntry<K, V>(key, value);
  }

  /**
   * Parses an entry off of the input into the map. This helper avoids allocation of a
||||
* {@link MapEntryLite} by parsing directly into the provided {@link MapFieldLite}.
|
||||
*/
|
||||
public void parseInto(
|
||||
MapFieldLite<K, V> map, CodedInputStream input, ExtensionRegistryLite extensionRegistry)
|
||||
throws IOException {
|
||||
int length = input.readRawVarint32();
|
||||
final int oldLimit = input.pushLimit(length);
|
||||
K key = metadata.defaultKey;
|
||||
V value = metadata.defaultValue;
|
||||
|
||||
while (true) {
|
||||
int tag = input.readTag();
|
||||
if (tag == 0) {
|
||||
break;
|
||||
}
|
||||
if (tag == WireFormat.makeTag(KEY_FIELD_NUMBER, metadata.keyType.getWireType())) {
|
||||
key = parseField(input, extensionRegistry, metadata.keyType, key);
|
||||
} else if (tag == WireFormat.makeTag(VALUE_FIELD_NUMBER, metadata.valueType.getWireType())) {
|
||||
value = parseField(input, extensionRegistry, metadata.valueType, value);
|
||||
} else {
|
||||
if (!input.skipField(tag)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
input.checkLastTagWas(0);
|
||||
input.popLimit(oldLimit);
|
||||
map.put(key, value);
|
||||
}
|
||||
}
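For readers tracing what the removed class did: the sketch below round-trips one map<string, int32> entry in the wire format MapEntryLite implements (key as field 1, value as field 2), using only the public CodedInputStream/CodedOutputStream API of the shaded runtime. A minimal sketch; the demo class name and sample values are illustrative, not part of the codebase.

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream;

public class MapEntryWireFormatDemo {  // illustrative name
  public static void main(String[] args) throws IOException {
    // Encode one map<string, int32> entry: key = "row", value = 42.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    CodedOutputStream out = CodedOutputStream.newInstance(bytes);
    out.writeString(1, "row");   // KEY_FIELD_NUMBER
    out.writeInt32(2, 42);       // VALUE_FIELD_NUMBER
    out.flush();

    // Decode it the way parseEntry() does: loop on tags until EOF.
    CodedInputStream in = CodedInputStream.newInstance(bytes.toByteArray());
    String key = "";
    int value = 0;
    int tag;
    while ((tag = in.readTag()) != 0) {
      switch (tag >>> 3) {       // field number lives in the upper bits of the tag
        case 1: key = in.readString(); break;
        case 2: value = in.readInt32(); break;
        default: in.skipField(tag);
      }
    }
    System.out.println(key + " -> " + value); // prints: row -> 42
  }
}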
@@ -1,624 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Internal representation of map fields in generated messages.
 *
 * This class supports accessing the map field as a {@link Map} to be used in
 * generated API and also supports accessing the field as a {@link List} to be
 * used in reflection API. It keeps track of where the data is currently stored
 * and does the necessary conversions between map and list.
 *
 * This class is a protobuf implementation detail. Users shouldn't use this
 * class directly.
 *
 * THREAD-SAFETY NOTE: Read-only access is thread-safe. Users can call getMap()
 * and getList() concurrently in multiple threads. If write-access is needed,
 * all access must be synchronized.
 */
public class MapField<K, V> implements MutabilityOracle {
  /**
   * Indicates where the data of this map field is currently stored.
   *
   * MAP: Data is stored in mapData.
   * LIST: Data is stored in listData.
   * BOTH: mapData and listData have the same data.
   *
   * When the map field is accessed (through generated API or reflection API),
   * it will shift between these 3 modes:
   *
   *          getMap()  getList()  getMutableMap()  getMutableList()
   *   MAP    MAP       BOTH       MAP              LIST
   *   LIST   BOTH      LIST       MAP              LIST
   *   BOTH   BOTH      BOTH       MAP              LIST
   *
   * As the map field changes its mode, the list/map reference returned in a
   * previous method call may be invalidated.
   */
  private enum StorageMode {MAP, LIST, BOTH}

  private volatile boolean isMutable;
  private volatile StorageMode mode;
  private MutatabilityAwareMap<K, V> mapData;
  private List<Message> listData;

  // Convert between a map entry Message and a key-value pair.
  private static interface Converter<K, V> {
    Message convertKeyAndValueToMessage(K key, V value);
    void convertMessageToKeyAndValue(Message message, Map<K, V> map);

    Message getMessageDefaultInstance();
  }

  private static class ImmutableMessageConverter<K, V> implements Converter<K, V> {
    private final MapEntry<K, V> defaultEntry;
    public ImmutableMessageConverter(MapEntry<K, V> defaultEntry) {
      this.defaultEntry = defaultEntry;
    }

    @Override
    public Message convertKeyAndValueToMessage(K key, V value) {
      return defaultEntry.newBuilderForType().setKey(key).setValue(value).buildPartial();
    }

    @Override
    public void convertMessageToKeyAndValue(Message message, Map<K, V> map) {
      MapEntry<K, V> entry = (MapEntry<K, V>) message;
      map.put(entry.getKey(), entry.getValue());
    }

    @Override
    public Message getMessageDefaultInstance() {
      return defaultEntry;
    }
  }


  private final Converter<K, V> converter;

  private MapField(
      Converter<K, V> converter,
      StorageMode mode,
      Map<K, V> mapData) {
    this.converter = converter;
    this.isMutable = true;
    this.mode = mode;
    this.mapData = new MutatabilityAwareMap<K, V>(this, mapData);
    this.listData = null;
  }

  private MapField(
      MapEntry<K, V> defaultEntry,
      StorageMode mode,
      Map<K, V> mapData) {
    this(new ImmutableMessageConverter<K, V>(defaultEntry), mode, mapData);
  }


  /** Returns an immutable empty MapField. */
  public static <K, V> MapField<K, V> emptyMapField(
      MapEntry<K, V> defaultEntry) {
    return new MapField<K, V>(
        defaultEntry, StorageMode.MAP, Collections.<K, V>emptyMap());
  }


  /** Creates a new mutable empty MapField. */
  public static <K, V> MapField<K, V> newMapField(MapEntry<K, V> defaultEntry) {
    return new MapField<K, V>(
        defaultEntry, StorageMode.MAP, new LinkedHashMap<K, V>());
  }


  private Message convertKeyAndValueToMessage(K key, V value) {
    return converter.convertKeyAndValueToMessage(key, value);
  }

  @SuppressWarnings("unchecked")
  private void convertMessageToKeyAndValue(Message message, Map<K, V> map) {
    converter.convertMessageToKeyAndValue(message, map);
  }

  private List<Message> convertMapToList(MutatabilityAwareMap<K, V> mapData) {
    List<Message> listData = new ArrayList<Message>();
    for (Map.Entry<K, V> entry : mapData.entrySet()) {
      listData.add(
          convertKeyAndValueToMessage(
              entry.getKey(), entry.getValue()));
    }
    return listData;
  }

  private MutatabilityAwareMap<K, V> convertListToMap(List<Message> listData) {
    Map<K, V> mapData = new LinkedHashMap<K, V>();
    for (Message item : listData) {
      convertMessageToKeyAndValue(item, mapData);
    }
    return new MutatabilityAwareMap<K, V>(this, mapData);
  }

  /** Returns the content of this MapField as a read-only Map. */
  public Map<K, V> getMap() {
    if (mode == StorageMode.LIST) {
      synchronized (this) {
        if (mode == StorageMode.LIST) {
          mapData = convertListToMap(listData);
          mode = StorageMode.BOTH;
        }
      }
    }
    return Collections.unmodifiableMap(mapData);
  }

  /** Gets a mutable Map view of this MapField. */
  public Map<K, V> getMutableMap() {
    if (mode != StorageMode.MAP) {
      if (mode == StorageMode.LIST) {
        mapData = convertListToMap(listData);
      }
      listData = null;
      mode = StorageMode.MAP;
    }
    return mapData;
  }

  public void mergeFrom(MapField<K, V> other) {
    getMutableMap().putAll(MapFieldLite.copy(other.getMap()));
  }

  public void clear() {
    mapData = new MutatabilityAwareMap<K, V>(this, new LinkedHashMap<K, V>());
    mode = StorageMode.MAP;
  }

  @SuppressWarnings("unchecked")
  @Override
  public boolean equals(Object object) {
    if (!(object instanceof MapField)) {
      return false;
    }
    MapField<K, V> other = (MapField<K, V>) object;
    return MapFieldLite.<K, V>equals(getMap(), other.getMap());
  }

  @Override
  public int hashCode() {
    return MapFieldLite.<K, V>calculateHashCodeForMap(getMap());
  }

  /** Returns a deep copy of this MapField. */
  public MapField<K, V> copy() {
    return new MapField<K, V>(
        converter, StorageMode.MAP, MapFieldLite.copy(getMap()));
  }

  /** Gets the content of this MapField as a read-only List. */
  List<Message> getList() {
    if (mode == StorageMode.MAP) {
      synchronized (this) {
        if (mode == StorageMode.MAP) {
          listData = convertMapToList(mapData);
          mode = StorageMode.BOTH;
        }
      }
    }
    return Collections.unmodifiableList(listData);
  }

  /** Gets a mutable List view of this MapField. */
  List<Message> getMutableList() {
    if (mode != StorageMode.LIST) {
      if (mode == StorageMode.MAP) {
        listData = convertMapToList(mapData);
      }
      mapData = null;
      mode = StorageMode.LIST;
    }
    return listData;
  }

  /**
   * Gets the default instance of the message stored in the list view of this
   * map field.
   */
  Message getMapEntryMessageDefaultInstance() {
    return converter.getMessageDefaultInstance();
  }

  /**
   * Makes this list immutable. All subsequent modifications will throw an
   * {@link UnsupportedOperationException}.
   */
  public void makeImmutable() {
    isMutable = false;
  }

  /**
   * Returns whether this field can be modified.
   */
  public boolean isMutable() {
    return isMutable;
  }

  /* (non-Javadoc)
   * @see org.apache.hadoop.hbase.shaded.com.google.protobuf.MutabilityOracle#ensureMutable()
   */
  @Override
  public void ensureMutable() {
    if (!isMutable()) {
      throw new UnsupportedOperationException();
    }
  }

  /**
   * An internal map that checks for mutability before delegating.
   */
  private static class MutatabilityAwareMap<K, V> implements Map<K, V> {
    private final MutabilityOracle mutabilityOracle;
    private final Map<K, V> delegate;

    MutatabilityAwareMap(MutabilityOracle mutabilityOracle, Map<K, V> delegate) {
      this.mutabilityOracle = mutabilityOracle;
      this.delegate = delegate;
    }

    @Override
    public int size() {
      return delegate.size();
    }

    @Override
    public boolean isEmpty() {
      return delegate.isEmpty();
    }

    @Override
    public boolean containsKey(Object key) {
      return delegate.containsKey(key);
    }

    @Override
    public boolean containsValue(Object value) {
      return delegate.containsValue(value);
    }

    @Override
    public V get(Object key) {
      return delegate.get(key);
    }

    @Override
    public V put(K key, V value) {
      mutabilityOracle.ensureMutable();
      return delegate.put(key, value);
    }

    @Override
    public V remove(Object key) {
      mutabilityOracle.ensureMutable();
      return delegate.remove(key);
    }

    @Override
    public void putAll(Map<? extends K, ? extends V> m) {
      mutabilityOracle.ensureMutable();
      delegate.putAll(m);
    }

    @Override
    public void clear() {
      mutabilityOracle.ensureMutable();
      delegate.clear();
    }

    @Override
    public Set<K> keySet() {
      return new MutatabilityAwareSet<K>(mutabilityOracle, delegate.keySet());
    }

    @Override
    public Collection<V> values() {
      return new MutatabilityAwareCollection<V>(mutabilityOracle, delegate.values());
    }

    @Override
    public Set<java.util.Map.Entry<K, V>> entrySet() {
      return new MutatabilityAwareSet<Entry<K, V>>(mutabilityOracle, delegate.entrySet());
    }

    @Override
    public boolean equals(Object o) {
      return delegate.equals(o);
    }

    @Override
    public int hashCode() {
      return delegate.hashCode();
    }

    @Override
    public String toString() {
      return delegate.toString();
    }

    /**
     * An internal collection that checks for mutability before delegating.
     */
    private static class MutatabilityAwareCollection<E> implements Collection<E> {
      private final MutabilityOracle mutabilityOracle;
      private final Collection<E> delegate;

      MutatabilityAwareCollection(MutabilityOracle mutabilityOracle, Collection<E> delegate) {
        this.mutabilityOracle = mutabilityOracle;
        this.delegate = delegate;
      }

      @Override
      public int size() {
        return delegate.size();
      }

      @Override
      public boolean isEmpty() {
        return delegate.isEmpty();
      }

      @Override
      public boolean contains(Object o) {
        return delegate.contains(o);
      }

      @Override
      public Iterator<E> iterator() {
        return new MutatabilityAwareIterator<E>(mutabilityOracle, delegate.iterator());
      }

      @Override
      public Object[] toArray() {
        return delegate.toArray();
      }

      @Override
      public <T> T[] toArray(T[] a) {
        return delegate.toArray(a);
      }

      @Override
      public boolean add(E e) {
        // Unsupported operation in the delegate.
        throw new UnsupportedOperationException();
      }

      @Override
      public boolean remove(Object o) {
        mutabilityOracle.ensureMutable();
        return delegate.remove(o);
      }

      @Override
      public boolean containsAll(Collection<?> c) {
        return delegate.containsAll(c);
      }

      @Override
      public boolean addAll(Collection<? extends E> c) {
        // Unsupported operation in the delegate.
        throw new UnsupportedOperationException();
      }

      @Override
      public boolean removeAll(Collection<?> c) {
        mutabilityOracle.ensureMutable();
        return delegate.removeAll(c);
      }

      @Override
      public boolean retainAll(Collection<?> c) {
        mutabilityOracle.ensureMutable();
        return delegate.retainAll(c);
      }

      @Override
      public void clear() {
        mutabilityOracle.ensureMutable();
        delegate.clear();
      }

      @Override
      public boolean equals(Object o) {
        return delegate.equals(o);
      }

      @Override
      public int hashCode() {
        return delegate.hashCode();
      }

      @Override
      public String toString() {
        return delegate.toString();
      }
    }

    /**
     * An internal set that checks for mutability before delegating.
     */
    private static class MutatabilityAwareSet<E> implements Set<E> {
      private final MutabilityOracle mutabilityOracle;
      private final Set<E> delegate;

      MutatabilityAwareSet(MutabilityOracle mutabilityOracle, Set<E> delegate) {
        this.mutabilityOracle = mutabilityOracle;
        this.delegate = delegate;
      }

      @Override
      public int size() {
        return delegate.size();
      }

      @Override
      public boolean isEmpty() {
        return delegate.isEmpty();
      }

      @Override
      public boolean contains(Object o) {
        return delegate.contains(o);
      }

      @Override
      public Iterator<E> iterator() {
        return new MutatabilityAwareIterator<E>(mutabilityOracle, delegate.iterator());
      }

      @Override
      public Object[] toArray() {
        return delegate.toArray();
      }

      @Override
      public <T> T[] toArray(T[] a) {
        return delegate.toArray(a);
      }

      @Override
      public boolean add(E e) {
        mutabilityOracle.ensureMutable();
        return delegate.add(e);
      }

      @Override
      public boolean remove(Object o) {
        mutabilityOracle.ensureMutable();
        return delegate.remove(o);
      }

      @Override
      public boolean containsAll(Collection<?> c) {
        return delegate.containsAll(c);
      }

      @Override
      public boolean addAll(Collection<? extends E> c) {
        mutabilityOracle.ensureMutable();
        return delegate.addAll(c);
      }

      @Override
      public boolean retainAll(Collection<?> c) {
        mutabilityOracle.ensureMutable();
        return delegate.retainAll(c);
      }

      @Override
      public boolean removeAll(Collection<?> c) {
        mutabilityOracle.ensureMutable();
        return delegate.removeAll(c);
      }

      @Override
      public void clear() {
        mutabilityOracle.ensureMutable();
        delegate.clear();
      }

      @Override
      public boolean equals(Object o) {
        return delegate.equals(o);
      }

      @Override
      public int hashCode() {
        return delegate.hashCode();
      }

      @Override
      public String toString() {
        return delegate.toString();
      }
    }

    /**
     * An internal iterator that checks for mutability before delegating.
     */
    private static class MutatabilityAwareIterator<E> implements Iterator<E> {
      private final MutabilityOracle mutabilityOracle;
      private final Iterator<E> delegate;

      MutatabilityAwareIterator(MutabilityOracle mutabilityOracle, Iterator<E> delegate) {
        this.mutabilityOracle = mutabilityOracle;
        this.delegate = delegate;
      }

      @Override
      public boolean hasNext() {
        return delegate.hasNext();
      }

      @Override
      public E next() {
        return delegate.next();
      }

      @Override
      public void remove() {
        mutabilityOracle.ensureMutable();
        delegate.remove();
      }

      @Override
      public boolean equals(Object obj) {
        return delegate.equals(obj);
      }

      @Override
      public int hashCode() {
        return delegate.hashCode();
      }

      @Override
      public String toString() {
        return delegate.toString();
      }
    }
  }
}
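The MutatabilityAwareMap machinery above reduces to one pattern: a delegating map that serves reads freely but consults a shared mutability flag before every write. A minimal plain-JDK sketch of that pattern follows; the class and method names are illustrative, not part of the real runtime.

import java.util.HashMap;
import java.util.Map;

public class FreezableMap<K, V> {  // illustrative name
  private final Map<K, V> delegate = new HashMap<>();
  private volatile boolean mutable = true;

  public void makeImmutable() { mutable = false; }

  private void ensureMutable() {
    if (!mutable) {
      throw new UnsupportedOperationException("frozen");
    }
  }

  public V get(K key) { return delegate.get(key); }  // reads always allowed

  public V put(K key, V value) {                     // writes checked first
    ensureMutable();
    return delegate.put(key, value);
  }

  public static void main(String[] args) {
    FreezableMap<String, Integer> map = new FreezableMap<>();
    map.put("a", 1);
    map.makeImmutable();
    map.get("a");                                    // still fine
    try {
      map.put("b", 2);                               // throws
    } catch (UnsupportedOperationException expected) {
      System.out.println("write rejected after makeImmutable()");
    }
  }
}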
@@ -1,224 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLite;

import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

/**
 * Internal representation of map fields in generated lite-runtime messages.
 *
 * This class is a protobuf implementation detail. Users shouldn't use this
 * class directly.
 */
public final class MapFieldLite<K, V> extends LinkedHashMap<K, V> {

  private boolean isMutable;

  private MapFieldLite() {
    this.isMutable = true;
  }

  private MapFieldLite(Map<K, V> mapData) {
    super(mapData);
    this.isMutable = true;
  }

  @SuppressWarnings({"rawtypes", "unchecked"})
  private static final MapFieldLite EMPTY_MAP_FIELD = new MapFieldLite();
  static {
    EMPTY_MAP_FIELD.makeImmutable();
  }

  /** Returns a singleton immutable empty MapFieldLite instance. */
  @SuppressWarnings({"unchecked", "cast"})
  public static <K, V> MapFieldLite<K, V> emptyMapField() {
    return (MapFieldLite<K, V>) EMPTY_MAP_FIELD;
  }

  public void mergeFrom(MapFieldLite<K, V> other) {
    ensureMutable();
    if (!other.isEmpty()) {
      putAll(other);
    }
  }

  @SuppressWarnings({"unchecked", "cast"})
  @Override public Set<Map.Entry<K, V>> entrySet() {
    return isEmpty() ? Collections.<Map.Entry<K, V>>emptySet() : super.entrySet();
  }

  @Override public void clear() {
    ensureMutable();
    super.clear();
  }

  @Override public V put(K key, V value) {
    ensureMutable();
    return super.put(key, value);
  }

  public V put(Map.Entry<K, V> entry) {
    return put(entry.getKey(), entry.getValue());
  }

  @Override public void putAll(Map<? extends K, ? extends V> m) {
    ensureMutable();
    super.putAll(m);
  }

  @Override public V remove(Object key) {
    ensureMutable();
    return super.remove(key);
  }

  private static boolean equals(Object a, Object b) {
    if (a instanceof byte[] && b instanceof byte[]) {
      return Arrays.equals((byte[]) a, (byte[]) b);
    }
    return a.equals(b);
  }

  /**
   * Checks whether two {@link Map}s are equal. We don't use the default equals
   * method of {@link Map} because it compares by identity not by content for
   * byte arrays.
   */
  static <K, V> boolean equals(Map<K, V> a, Map<K, V> b) {
    if (a == b) {
      return true;
    }
    if (a.size() != b.size()) {
      return false;
    }
    for (Map.Entry<K, V> entry : a.entrySet()) {
      if (!b.containsKey(entry.getKey())) {
        return false;
      }
      if (!equals(entry.getValue(), b.get(entry.getKey()))) {
        return false;
      }
    }
    return true;
  }

  /**
   * Checks whether two map fields are equal.
   */
  @SuppressWarnings("unchecked")
  @Override
  public boolean equals(Object object) {
    return (object instanceof Map) && equals(this, (Map<K, V>) object);
  }

  private static int calculateHashCodeForObject(Object a) {
    if (a instanceof byte[]) {
      return Internal.hashCode((byte[]) a);
    }
    // Enums should be stored as integers internally.
    if (a instanceof EnumLite) {
      throw new UnsupportedOperationException();
    }
    return a.hashCode();
  }

  /**
   * Calculates the hash code for a {@link Map}. We don't use the default hash
   * code method of {@link Map} because for byte arrays and protobuf enums it
   * uses {@link Object#hashCode()}.
   */
  static <K, V> int calculateHashCodeForMap(Map<K, V> a) {
    int result = 0;
    for (Map.Entry<K, V> entry : a.entrySet()) {
      result += calculateHashCodeForObject(entry.getKey())
          ^ calculateHashCodeForObject(entry.getValue());
    }
    return result;
  }

  @Override
  public int hashCode() {
    return calculateHashCodeForMap(this);
  }

  private static Object copy(Object object) {
    if (object instanceof byte[]) {
      byte[] data = (byte[]) object;
      return Arrays.copyOf(data, data.length);
    }
    return object;
  }

  /**
   * Makes a deep copy of a {@link Map}. Immutable objects in the map will be
   * shared (e.g., integers, strings, immutable messages) and mutable ones will
   * have a copy (e.g., byte arrays, mutable messages).
   */
  @SuppressWarnings("unchecked")
  static <K, V> Map<K, V> copy(Map<K, V> map) {
    Map<K, V> result = new LinkedHashMap<K, V>();
    for (Map.Entry<K, V> entry : map.entrySet()) {
      result.put(entry.getKey(), (V) copy(entry.getValue()));
    }
    return result;
  }

  /** Returns a deep copy of this map field. */
  public MapFieldLite<K, V> mutableCopy() {
    return isEmpty() ? new MapFieldLite<K, V>() : new MapFieldLite<K, V>(this);
  }

  /**
   * Makes this field immutable. All subsequent modifications will throw an
   * {@link UnsupportedOperationException}.
   */
  public void makeImmutable() {
    isMutable = false;
  }

  /**
   * Returns whether this field can be modified.
   */
  public boolean isMutable() {
    return isMutable;
  }

  private void ensureMutable() {
    if (!isMutable()) {
      throw new UnsupportedOperationException();
    }
  }
}
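The overridden equals()/hashCode() above exist because Java arrays compare by identity, so two byte[] values holding the same bytes are unequal under Object.equals(). A quick plain-JDK illustration of the problem MapFieldLite works around (demo class name is illustrative):

import java.util.Arrays;

public class ByteArrayEqualityDemo {
  public static void main(String[] args) {
    byte[] a = {1, 2, 3};
    byte[] b = {1, 2, 3};
    System.out.println(a.equals(b));          // false: identity comparison
    System.out.println(Arrays.equals(a, b));  // true: content comparison
    System.out.println(a.hashCode() == b.hashCode());             // false in practice
    System.out.println(Arrays.hashCode(a) == Arrays.hashCode(b)); // true
  }
}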
@@ -1,292 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// TODO(kenton): Use generics? E.g. Builder<BuilderType extends Builder>, then
// mergeFrom*() could return BuilderType for better type-safety.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import java.io.IOException;
import java.io.InputStream;
import java.util.Map;

/**
 * Abstract interface implemented by Protocol Message objects.
 * <p>
 * See also {@link MessageLite}, which defines most of the methods that typical
 * users care about. {@link Message} adds to it methods that are not available
 * in the "lite" runtime. The biggest added features are introspection and
 * reflection -- i.e., getting descriptors for the message type and accessing
 * the field values dynamically.
 *
 * @author kenton@google.com Kenton Varda
 */
public interface Message extends MessageLite, MessageOrBuilder {

  // (From MessageLite, re-declared here only for return type covariance.)
  @Override
  Parser<? extends Message> getParserForType();


  // -----------------------------------------------------------------
  // Comparison and hashing

  /**
   * Compares the specified object with this message for equality. Returns
   * {@code true} if the given object is a message of the same type (as
   * defined by {@code getDescriptorForType()}) and has identical values for
   * all of its fields. Subclasses must implement this; inheriting
   * {@code Object.equals()} is incorrect.
   *
   * @param other object to be compared for equality with this message
   * @return {@code true} if the specified object is equal to this message
   */
  @Override
  boolean equals(Object other);

  /**
   * Returns the hash code value for this message. The hash code of a message
   * should mix the message's type (object identity of the descriptor) with its
   * contents (known and unknown field values). Subclasses must implement this;
   * inheriting {@code Object.hashCode()} is incorrect.
   *
   * @return the hash code value for this message
   * @see Map#hashCode()
   */
  @Override
  int hashCode();

  // -----------------------------------------------------------------
  // Convenience methods.

  /**
   * Converts the message to a string in protocol buffer text format. This is
   * just a trivial wrapper around {@link
   * TextFormat#printToString(MessageOrBuilder)}.
   */
  @Override
  String toString();

  // =================================================================
  // Builders

  // (From MessageLite, re-declared here only for return type covariance.)
  @Override
  Builder newBuilderForType();

  @Override
  Builder toBuilder();

  /**
   * Abstract interface implemented by Protocol Message builders.
   */
  interface Builder extends MessageLite.Builder, MessageOrBuilder {
    // (From MessageLite.Builder, re-declared here only for return type
    // covariance.)
    @Override
    Builder clear();

    /**
     * Merge {@code other} into the message being built. {@code other} must
     * have the exact same type as {@code this} (i.e.
     * {@code getDescriptorForType() == other.getDescriptorForType()}).
     *
     * Merging occurs as follows. For each field:<br>
     * * For singular primitive fields, if the field is set in {@code other},
     *   then {@code other}'s value overwrites the value in this message.<br>
     * * For singular message fields, if the field is set in {@code other},
     *   it is merged into the corresponding sub-message of this message
     *   using the same merging rules.<br>
     * * For repeated fields, the elements in {@code other} are concatenated
     *   with the elements in this message.
     * * For oneof groups, if the other message has one of the fields set,
     *   the group of this message is cleared and replaced by the field
     *   of the other message, so that the oneof constraint is preserved.
     *
     * This is equivalent to the {@code Message::MergeFrom} method in C++.
     */
    Builder mergeFrom(Message other);

    // (From MessageLite.Builder, re-declared here only for return type
    // covariance.)
    @Override
    Message build();

    @Override
    Message buildPartial();

    @Override
    Builder clone();

    @Override
    Builder mergeFrom(CodedInputStream input) throws IOException;

    @Override
    Builder mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
        throws IOException;

    /**
     * Get the message's type's descriptor.
     * See {@link Message#getDescriptorForType()}.
     */
    @Override
    Descriptors.Descriptor getDescriptorForType();

    /**
     * Create a Builder for messages of the appropriate type for the given
     * field. Messages built with this can then be passed to setField(),
     * setRepeatedField(), or addRepeatedField().
     */
    Builder newBuilderForField(Descriptors.FieldDescriptor field);

    /**
     * Get a nested builder instance for the given field.
     * <p>
     * Normally, we hold a reference to the immutable message object for the
     * message type field. Some implementations (the generated message builders),
     * however, can also hold a reference to the builder object (a nested
     * builder) for the field.
     * <p>
     * If the field is already backed up by a nested builder, the nested builder
     * will be returned. Otherwise, a new field builder will be created and
     * returned. The original message field (if it exists) will be merged into the
     * field builder, which will then be nested into its parent builder.
     * <p>
     * NOTE: implementations that do not support nested builders will throw
     * <code>UnsupportedOperationException</code>.
     */
    Builder getFieldBuilder(Descriptors.FieldDescriptor field);

    /**
     * Get a nested builder instance for the given repeated field instance.
     * <p>
     * Normally, we hold a reference to the immutable message object for the
     * message type field. Some implementations (the generated message builders),
     * however, can also hold a reference to the builder object (a nested
     * builder) for the field.
     * <p>
     * If the field is already backed up by a nested builder, the nested builder
     * will be returned. Otherwise, a new field builder will be created and
     * returned. The original message field (if it exists) will be merged into the
     * field builder, which will then be nested into its parent builder.
     * <p>
     * NOTE: implementations that do not support nested builders will throw
     * <code>UnsupportedOperationException</code>.
     */
    Builder getRepeatedFieldBuilder(Descriptors.FieldDescriptor field,
        int index);

    /**
     * Sets a field to the given value. The value must be of the correct type
     * for this field, i.e. the same type that
     * {@link Message#getField(Descriptors.FieldDescriptor)} would return.
     */
    Builder setField(Descriptors.FieldDescriptor field, Object value);

    /**
     * Clears the field. This is exactly equivalent to calling the generated
     * "clear" accessor method corresponding to the field.
     */
    Builder clearField(Descriptors.FieldDescriptor field);

    /**
     * Clears the oneof. This is exactly equivalent to calling the generated
     * "clear" accessor method corresponding to the oneof.
     */
    Builder clearOneof(Descriptors.OneofDescriptor oneof);

    /**
     * Sets an element of a repeated field to the given value. The value must
     * be of the correct type for this field, i.e. the same type that
     * {@link Message#getRepeatedField(Descriptors.FieldDescriptor,int)} would
     * return.
     * @throws IllegalArgumentException The field is not a repeated field, or
     *         {@code field.getContainingType() != getDescriptorForType()}.
     */
    Builder setRepeatedField(Descriptors.FieldDescriptor field,
        int index, Object value);

    /**
     * Like {@code setRepeatedField}, but appends the value as a new element.
     * @throws IllegalArgumentException The field is not a repeated field, or
     *         {@code field.getContainingType() != getDescriptorForType()}.
     */
    Builder addRepeatedField(Descriptors.FieldDescriptor field, Object value);

    /** Set the {@link UnknownFieldSet} for this message. */
    Builder setUnknownFields(UnknownFieldSet unknownFields);

    /**
     * Merge some unknown fields into the {@link UnknownFieldSet} for this
     * message.
     */
    Builder mergeUnknownFields(UnknownFieldSet unknownFields);

    // ---------------------------------------------------------------
    // Convenience methods.

    // (From MessageLite.Builder, re-declared here only for return type
    // covariance.)
    @Override
    Builder mergeFrom(ByteString data) throws InvalidProtocolBufferException;

    @Override
    Builder mergeFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
        throws InvalidProtocolBufferException;

    @Override
    Builder mergeFrom(byte[] data) throws InvalidProtocolBufferException;

    @Override
    Builder mergeFrom(byte[] data, int off, int len) throws InvalidProtocolBufferException;

    @Override
    Builder mergeFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
        throws InvalidProtocolBufferException;

    @Override
    Builder mergeFrom(byte[] data, int off, int len, ExtensionRegistryLite extensionRegistry)
        throws InvalidProtocolBufferException;

    @Override
    Builder mergeFrom(InputStream input) throws IOException;

    @Override
    Builder mergeFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
        throws IOException;

    @Override
    boolean mergeDelimitedFrom(InputStream input) throws IOException;

    @Override
    boolean mergeDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
        throws IOException;
  }
}
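A hedged sketch of the mergeFrom(Message) rules documented above, assuming a hypothetical generated message Person with a singular string field name and a repeated string field aliases; no such message ships with HBase, and any generated type would do. Generated setters/adders follow the usual protobuf naming convention.

// Person is hypothetical: message Person { string name = 1; repeated string aliases = 2; }
Person base  = Person.newBuilder().setName("old").addAliases("a").build();
Person other = Person.newBuilder().setName("new").addAliases("b").build();

Person merged = base.toBuilder().mergeFrom(other).build();
// Singular field: other's set value wins    -> merged.getName() is "new"
// Repeated field: elements are concatenated -> merged.getAliasesList() is ["a", "b"]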
@@ -1,341 +0,0 @@
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// TODO(kenton): Use generics? E.g. Builder<BuilderType extends Builder>, then
|
||||
// mergeFrom*() could return BuilderType for better type-safety.
|
||||
|
||||
package org.apache.hadoop.hbase.shaded.com.google.protobuf;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
|
||||
/**
|
||||
* Abstract interface implemented by Protocol Message objects.
|
||||
*
|
||||
* <p>This interface is implemented by all protocol message objects. Non-lite
|
||||
* messages additionally implement the Message interface, which is a subclass
|
||||
* of MessageLite. Use MessageLite instead when you only need the subset of
|
||||
* features which it supports -- namely, nothing that uses descriptors or
|
||||
* reflection. You can instruct the protocol compiler to generate classes
|
||||
* which implement only MessageLite, not the full Message interface, by adding
|
||||
* the follow line to the .proto file:
|
||||
* <pre>
|
||||
* option optimize_for = LITE_RUNTIME;
|
||||
* </pre>
|
||||
*
|
||||
* <p>This is particularly useful on resource-constrained systems where the
|
||||
* full protocol buffers runtime library is too big.
|
||||
*
|
||||
* <p>Note that on non-constrained systems (e.g. servers) when you need to link
|
||||
* in lots of protocol definitions, a better way to reduce total code footprint
|
||||
* is to use {@code optimize_for = CODE_SIZE}. This will make the generated
|
||||
* code smaller while still supporting all the same features (at the expense of
|
||||
* speed). {@code optimize_for = LITE_RUNTIME} is best when you only have a
|
||||
* small number of message types linked into your binary, in which case the
|
||||
* size of the protocol buffers runtime itself is the biggest problem.
|
||||
*
|
||||
* @author kenton@google.com Kenton Varda
|
||||
*/
|
||||
public interface MessageLite extends MessageLiteOrBuilder {
|
||||
|
||||
|
||||
/**
|
||||
* Serializes the message and writes it to {@code output}. This does not
|
||||
* flush or close the stream.
|
||||
*/
|
||||
void writeTo(CodedOutputStream output) throws IOException;
|
||||
|
||||
/**
|
||||
* Get the number of bytes required to encode this message. The result
|
||||
* is only computed on the first call and memoized after that.
|
||||
*/
|
||||
int getSerializedSize();
|
||||
|
||||
|
||||
/**
|
||||
* Gets the parser for a message of the same type as this message.
|
||||
*/
|
||||
Parser<? extends MessageLite> getParserForType();
|
||||
|
||||
// -----------------------------------------------------------------
|
||||
// Convenience methods.
|
||||
|
||||
/**
|
||||
* Serializes the message to a {@code ByteString} and returns it. This is
|
||||
* just a trivial wrapper around
|
||||
* {@link #writeTo(CodedOutputStream)}.
|
||||
*/
|
||||
ByteString toByteString();
|
||||
|
||||
/**
|
||||
* Serializes the message to a {@code byte} array and returns it. This is
|
||||
* just a trivial wrapper around
|
||||
* {@link #writeTo(CodedOutputStream)}.
|
||||
*/
|
||||
byte[] toByteArray();
|
||||
|
||||
/**
|
||||
* Serializes the message and writes it to {@code output}. This is just a
|
||||
* trivial wrapper around {@link #writeTo(CodedOutputStream)}. This does
|
||||
* not flush or close the stream.
|
||||
* <p>
|
||||
* NOTE: Protocol Buffers are not self-delimiting. Therefore, if you write
|
||||
* any more data to the stream after the message, you must somehow ensure
|
||||
* that the parser on the receiving end does not interpret this as being
|
||||
* part of the protocol message. This can be done e.g. by writing the size
|
||||
* of the message before the data, then making sure to limit the input to
|
||||
* that size on the receiving end (e.g. by wrapping the InputStream in one
|
||||
* which limits the input). Alternatively, just use
|
||||
* {@link #writeDelimitedTo(OutputStream)}.
|
||||
*/
|
||||
void writeTo(OutputStream output) throws IOException;
|
||||
|
||||
/**
|
||||
* Like {@link #writeTo(OutputStream)}, but writes the size of the message
|
||||
* as a varint before writing the data. This allows more data to be written
|
||||
* to the stream after the message without the need to delimit the message
|
||||
* data yourself. Use {@link Builder#mergeDelimitedFrom(InputStream)} (or
|
||||
* the static method {@code YourMessageType.parseDelimitedFrom(InputStream)})
|
||||
* to parse messages written by this method.
|
||||
*/
|
||||
void writeDelimitedTo(OutputStream output) throws IOException;
|
||||
|
||||
|
||||
// =================================================================
|
||||
// Builders
|
||||
|
||||
/**
|
||||
* Constructs a new builder for a message of the same type as this message.
|
||||
*/
|
||||
Builder newBuilderForType();
|
||||
|
||||
/**
|
||||
* Constructs a builder initialized with the current message. Use this to
|
||||
* derive a new message from the current one.
|
||||
*/
|
||||
Builder toBuilder();
|
||||
|
||||
/**
|
||||
* Abstract interface implemented by Protocol Message builders.
|
||||
*/
|
||||
interface Builder extends MessageLiteOrBuilder, Cloneable {
|
||||
/** Resets all fields to their default values. */
|
||||
Builder clear();
|
||||
|
||||
/**
|
||||
* Constructs the message based on the state of the Builder. Subsequent
|
||||
* changes to the Builder will not affect the returned message.
|
||||
* @throws UninitializedMessageException The message is missing one or more
|
||||
* required fields (i.e. {@link #isInitialized()} returns false).
|
||||
* Use {@link #buildPartial()} to bypass this check.
|
||||
*/
|
||||
MessageLite build();
|
||||
|
||||
/**
|
||||
* Like {@link #build()}, but does not throw an exception if the message
|
||||
* is missing required fields. Instead, a partial message is returned.
|
||||
* Subsequent changes to the Builder will not affect the returned message.
|
||||
*/
|
||||
MessageLite buildPartial();
|
||||
|
||||
/**
|
||||
* Clones the Builder.
|
||||
* @see Object#clone()
|
||||
*/
|
||||
Builder clone();
|
||||
|
||||
/**
|
||||
* Parses a message of this type from the input and merges it with this
|
||||
* message.
|
||||
*
|
||||
* <p>Warning: This does not verify that all required fields are present in
|
||||
* the input message. If you call {@link #build()} without setting all
|
||||
* required fields, it will throw an {@link UninitializedMessageException},
|
||||
* which is a {@code RuntimeException} and thus might not be caught. There
|
||||
* are a few good ways to deal with this:
|
||||
* <ul>
|
||||
* <li>Call {@link #isInitialized()} to verify that all required fields
|
||||
* are set before building.
|
||||
* <li>Use {@code buildPartial()} to build, which ignores missing
|
||||
* required fields.
|
||||
* </ul>
|
||||
*
|
||||
* <p>Note: The caller should call
|
||||
* {@link CodedInputStream#checkLastTagWas(int)} after calling this to
|
||||
* verify that the last tag seen was the appropriate end-group tag,
|
||||
* or zero for EOF.
|
||||
*/
|
||||
Builder mergeFrom(CodedInputStream input) throws IOException;
|
||||
|
||||
/**
|
||||
* Like {@link Builder#mergeFrom(CodedInputStream)}, but also
|
||||
* parses extensions. The extensions that you want to be able to parse
|
||||
* must be registered in {@code extensionRegistry}. Extensions not in
|
||||
* the registry will be treated as unknown fields.
|
||||
*/
|
||||
    Builder mergeFrom(CodedInputStream input,
                      ExtensionRegistryLite extensionRegistry)
                      throws IOException;

    // ---------------------------------------------------------------
    // Convenience methods.

    /**
     * Parse {@code data} as a message of this type and merge it with the
     * message being built.  This is just a small wrapper around
     * {@link #mergeFrom(CodedInputStream)}.
     *
     * @return this
     */
    Builder mergeFrom(ByteString data) throws InvalidProtocolBufferException;

    /**
     * Parse {@code data} as a message of this type and merge it with the
     * message being built.  This is just a small wrapper around
     * {@link #mergeFrom(CodedInputStream,ExtensionRegistryLite)}.
     *
     * @return this
     */
    Builder mergeFrom(ByteString data,
                      ExtensionRegistryLite extensionRegistry)
                      throws InvalidProtocolBufferException;

    /**
     * Parse {@code data} as a message of this type and merge it with the
     * message being built.  This is just a small wrapper around
     * {@link #mergeFrom(CodedInputStream)}.
     *
     * @return this
     */
    Builder mergeFrom(byte[] data) throws InvalidProtocolBufferException;

    /**
     * Parse {@code data} as a message of this type and merge it with the
     * message being built.  This is just a small wrapper around
     * {@link #mergeFrom(CodedInputStream)}.
     *
     * @return this
     */
    Builder mergeFrom(byte[] data, int off, int len)
                      throws InvalidProtocolBufferException;

    /**
     * Parse {@code data} as a message of this type and merge it with the
     * message being built.  This is just a small wrapper around
     * {@link #mergeFrom(CodedInputStream,ExtensionRegistryLite)}.
     *
     * @return this
     */
    Builder mergeFrom(byte[] data,
                      ExtensionRegistryLite extensionRegistry)
                      throws InvalidProtocolBufferException;

    /**
     * Parse {@code data} as a message of this type and merge it with the
     * message being built.  This is just a small wrapper around
     * {@link #mergeFrom(CodedInputStream,ExtensionRegistryLite)}.
     *
     * @return this
     */
    Builder mergeFrom(byte[] data, int off, int len,
                      ExtensionRegistryLite extensionRegistry)
                      throws InvalidProtocolBufferException;

    /**
     * Parse a message of this type from {@code input} and merge it with the
     * message being built.  This is just a small wrapper around
     * {@link #mergeFrom(CodedInputStream)}.  Note that this method always
     * reads the <i>entire</i> input (unless it throws an exception).  If you
     * want it to stop earlier, you will need to wrap your input in some
     * wrapper stream that limits reading.  Or, use
     * {@link MessageLite#writeDelimitedTo(OutputStream)} to write your message
     * and {@link #mergeDelimitedFrom(InputStream)} to read it.
     * <p>
     * Despite usually reading the entire input, this does not close the stream.
     *
     * @return this
     */
    Builder mergeFrom(InputStream input) throws IOException;

    /**
     * Parse a message of this type from {@code input} and merge it with the
     * message being built.  This is just a small wrapper around
     * {@link #mergeFrom(CodedInputStream,ExtensionRegistryLite)}.
     *
     * @return this
     */
    Builder mergeFrom(InputStream input,
                      ExtensionRegistryLite extensionRegistry)
                      throws IOException;

    /**
     * Merge {@code other} into the message being built.  {@code other} must
     * have the exact same type as {@code this} (i.e.
     * {@code getClass().equals(getDefaultInstanceForType().getClass())}).
     *
     * Merging occurs as follows.  For each field:<br>
     * * For singular primitive fields, if the field is set in {@code other},
     *   then {@code other}'s value overwrites the value in this message.<br>
     * * For singular message fields, if the field is set in {@code other},
     *   it is merged into the corresponding sub-message of this message
     *   using the same merging rules.<br>
     * * For repeated fields, the elements in {@code other} are concatenated
     *   with the elements in this message.
     * * For oneof groups, if the other message has one of the fields set,
     *   the group of this message is cleared and replaced by the field
     *   of the other message, so that the oneof constraint is preserved.
     *
     * This is equivalent to the {@code Message::MergeFrom} method in C++.
     */
    Builder mergeFrom(MessageLite other);

    /**
     * Like {@link #mergeFrom(InputStream)}, but does not read until EOF.
     * Instead, the size of the message (encoded as a varint) is read first,
     * then the message data.  Use
     * {@link MessageLite#writeDelimitedTo(OutputStream)} to write messages in
     * this format.
     *
     * @return True if successful, or false if the stream is at EOF when the
     *         method starts.  Any other error (including reaching EOF during
     *         parsing) will cause an exception to be thrown.
     */
    boolean mergeDelimitedFrom(InputStream input)
                               throws IOException;

    /**
     * Like {@link #mergeDelimitedFrom(InputStream)} but supporting extensions.
     */
    boolean mergeDelimitedFrom(InputStream input,
                               ExtensionRegistryLite extensionRegistry)
                               throws IOException;
  }
}
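The delimited read/write pairing the javadoc above describes is easiest to see end to end. A minimal sketch, assuming the relocated protobuf runtime is on the classpath and using the well-known StringValue wrapper as a stand-in for any generated message (the class name DelimitedRoundTrip is hypothetical):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue;

public class DelimitedRoundTrip {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();

    // writeDelimitedTo prefixes each message with its varint-encoded size,
    // so several messages can share a single stream.
    StringValue.newBuilder().setValue("first").build().writeDelimitedTo(out);
    StringValue.newBuilder().setValue("second").build().writeDelimitedTo(out);

    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    StringValue.Builder builder = StringValue.newBuilder();
    // mergeDelimitedFrom reads exactly one size-prefixed message and
    // returns false once the stream is at EOF.
    while (builder.mergeDelimitedFrom(in)) {
      System.out.println(builder.getValue());
      builder.clear();
    }
  }
}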
@@ -1,60 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

/**
 * Base interface for methods common to {@link MessageLite}
 * and {@link MessageLite.Builder} to provide type equivalency.
 *
 * @author jonp@google.com (Jon Perlow)
 */
public interface MessageLiteOrBuilder {
  /**
   * Get an instance of the type with no fields set. Because no fields are set,
   * all getters for singular fields will return default values and repeated
   * fields will appear empty.
   * This may or may not be a singleton.  This differs from the
   * {@code getDefaultInstance()} method of generated message classes in that
   * this method is an abstract method of the {@code MessageLite} interface
   * whereas {@code getDefaultInstance()} is a static method of a specific
   * class.  They return the same thing.
   */
  MessageLite getDefaultInstanceForType();

  /**
   * Returns true if all required fields in the message and all embedded
   * messages are set, false otherwise.
   *
   * <p>See also: {@link MessageOrBuilder#getInitializationErrorString()}
   */
  boolean isInitialized();

}
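Because both MessageLite and MessageLite.Builder implement this interface, code can accept either form and use isInitialized()/getDefaultInstanceForType() uniformly. A hedged sketch of that pattern (the helper class and method are illustrative, not part of the library):

import org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageLite;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageLiteOrBuilder;

public final class InitializationCheck {
  private InitializationCheck() {}

  /**
   * Works for both a built message and a builder, since both implement
   * MessageLiteOrBuilder.  Falls back to the default instance when required
   * fields are missing, instead of risking an exception from build() later.
   */
  static MessageLite orDefault(MessageLiteOrBuilder m) {
    if (!m.isInitialized()) {
      return m.getDefaultInstanceForType();
    }
    return (m instanceof MessageLite)
        ? (MessageLite) m
        : ((MessageLite.Builder) m).buildPartial();
  }
}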
@@ -1,239 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

/**
 * Helps generate {@link String} representations of {@link MessageLite} protos.
 */
// TODO(dweis): Fix map fields.
final class MessageLiteToString {

  private static final String LIST_SUFFIX = "List";
  private static final String BUILDER_LIST_SUFFIX = "OrBuilderList";
  private static final String BYTES_SUFFIX = "Bytes";

  /**
   * Returns a {@link String} representation of the {@link MessageLite} object. The first line of
   * the {@code String} representation includes a comment string to uniquely identify
   * the object instance. This acts as an indicator that this should not be relied on for
   * comparisons.
   *
   * <p>For use by generated code only.
   */
  static String toString(MessageLite messageLite, String commentString) {
    StringBuilder buffer = new StringBuilder();
    buffer.append("# ").append(commentString);
    reflectivePrintWithIndent(messageLite, buffer, 0);
    return buffer.toString();
  }

  /**
   * Reflectively prints the {@link MessageLite} to the buffer at given {@code indent} level.
   *
   * @param buffer the buffer to write to
   * @param indent the number of spaces to indent the proto by
   */
  private static void reflectivePrintWithIndent(
      MessageLite messageLite, StringBuilder buffer, int indent) {
    // Build a map of method name to method. We're looking for methods like getFoo(), hasFoo(), and
    // getFooList() which might be useful for building an object's string representation.
    Map<String, Method> nameToNoArgMethod = new HashMap<String, Method>();
    Map<String, Method> nameToMethod = new HashMap<String, Method>();
    Set<String> getters = new TreeSet<String>();
    for (Method method : messageLite.getClass().getDeclaredMethods()) {
      nameToMethod.put(method.getName(), method);
      if (method.getParameterTypes().length == 0) {
        nameToNoArgMethod.put(method.getName(), method);

        if (method.getName().startsWith("get")) {
          getters.add(method.getName());
        }
      }
    }

    for (String getter : getters) {
      String suffix = getter.replaceFirst("get", "");
      if (suffix.endsWith(LIST_SUFFIX) && !suffix.endsWith(BUILDER_LIST_SUFFIX)) {
        String camelCase = suffix.substring(0, 1).toLowerCase()
            + suffix.substring(1, suffix.length() - LIST_SUFFIX.length());
        // Try to reflectively get the value and toString() the field as if it were repeated. This
        // only works if the method names have not been proguarded out or renamed.
        Method listMethod = nameToNoArgMethod.get("get" + suffix);
        if (listMethod != null && listMethod.getReturnType().equals(List.class)) {
          printField(
              buffer,
              indent,
              camelCaseToSnakeCase(camelCase),
              GeneratedMessageLite.invokeOrDie(listMethod, messageLite));
          continue;
        }
      }

      Method setter = nameToMethod.get("set" + suffix);
      if (setter == null) {
        continue;
      }
      if (suffix.endsWith(BYTES_SUFFIX)
          && nameToNoArgMethod.containsKey(
              "get" + suffix.substring(0, suffix.length() - "Bytes".length()))) {
        // Heuristic to skip bytes based accessors for string fields.
        continue;
      }

      String camelCase = suffix.substring(0, 1).toLowerCase() + suffix.substring(1);

      // Try to reflectively get the value and toString() the field as if it were optional. This
      // only works if the method names have not been proguarded out or renamed.
      Method getMethod = nameToNoArgMethod.get("get" + suffix);
      Method hasMethod = nameToNoArgMethod.get("has" + suffix);
      // TODO(dweis): Fix proto3 semantics.
      if (getMethod != null) {
        Object value = GeneratedMessageLite.invokeOrDie(getMethod, messageLite);
        final boolean hasValue = hasMethod == null
            ? !isDefaultValue(value)
            : (Boolean) GeneratedMessageLite.invokeOrDie(hasMethod, messageLite);
        // TODO(dweis): This doesn't stop printing oneof case twice: value and enum style.
        if (hasValue) {
          printField(
              buffer,
              indent,
              camelCaseToSnakeCase(camelCase),
              value);
        }
        continue;
      }
    }

    if (messageLite instanceof GeneratedMessageLite.ExtendableMessage) {
      Iterator<Map.Entry<GeneratedMessageLite.ExtensionDescriptor, Object>> iter =
          ((GeneratedMessageLite.ExtendableMessage<?, ?>) messageLite).extensions.iterator();
      while (iter.hasNext()) {
        Map.Entry<GeneratedMessageLite.ExtensionDescriptor, Object> entry = iter.next();
        printField(buffer, indent, "[" + entry.getKey().getNumber() + "]", entry.getValue());
      }
    }

    if (((GeneratedMessageLite<?, ?>) messageLite).unknownFields != null) {
      ((GeneratedMessageLite<?, ?>) messageLite).unknownFields.printWithIndent(buffer, indent);
    }
  }

  private static boolean isDefaultValue(Object o) {
    if (o instanceof Boolean) {
      return !((Boolean) o);
    }
    if (o instanceof Integer) {
      return ((Integer) o) == 0;
    }
    if (o instanceof Float) {
      return ((Float) o) == 0f;
    }
    if (o instanceof Double) {
      return ((Double) o) == 0d;
    }
    if (o instanceof String) {
      return o.equals("");
    }
    if (o instanceof ByteString) {
      return o.equals(ByteString.EMPTY);
    }
    if (o instanceof MessageLite) { // Can happen in oneofs.
      return o == ((MessageLite) o).getDefaultInstanceForType();
    }
    if (o instanceof java.lang.Enum<?>) { // Catches oneof enums.
      return ((java.lang.Enum<?>) o).ordinal() == 0;
    }

    return false;
  }

  /**
   * Formats a text proto field.
   *
   * <p>For use by generated code only.
   *
   * @param buffer the buffer to write to
   * @param indent the number of spaces the proto should be indented by
   * @param name the field name (in lower underscore case)
   * @param object the object value of the field
   */
  static final void printField(StringBuilder buffer, int indent, String name, Object object) {
    if (object instanceof List<?>) {
      List<?> list = (List<?>) object;
      for (Object entry : list) {
        printField(buffer, indent, name, entry);
      }
      return;
    }

    buffer.append('\n');
    for (int i = 0; i < indent; i++) {
      buffer.append(' ');
    }
    buffer.append(name);

    if (object instanceof String) {
      buffer.append(": \"").append(TextFormatEscaper.escapeText((String) object)).append('"');
    } else if (object instanceof ByteString) {
      buffer.append(": \"").append(TextFormatEscaper.escapeBytes((ByteString) object)).append('"');
    } else if (object instanceof GeneratedMessageLite) {
      buffer.append(" {");
      reflectivePrintWithIndent((GeneratedMessageLite<?, ?>) object, buffer, indent + 2);
      buffer.append("\n");
      for (int i = 0; i < indent; i++) {
        buffer.append(' ');
      }
      buffer.append("}");
    } else {
      buffer.append(": ").append(object.toString());
    }
  }

  private static final String camelCaseToSnakeCase(String camelCase) {
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < camelCase.length(); i++) {
      char ch = camelCase.charAt(i);
      if (Character.isUpperCase(ch)) {
        builder.append("_");
      }
      builder.append(Character.toLowerCase(ch));
    }
    return builder.toString();
  }
}
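The name mangling at the end of this class is the piece worth internalizing: getter suffixes like "MyFieldName" become the proto-text field names like "my_field_name". A standalone restatement of that camelCaseToSnakeCase logic, runnable outside the package-private class (the demo class name is hypothetical):

public class SnakeCaseDemo {
  // Same algorithm as MessageLiteToString.camelCaseToSnakeCase above:
  // each uppercase letter is lowered and preceded by an underscore.
  static String camelCaseToSnakeCase(String camelCase) {
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < camelCase.length(); i++) {
      char ch = camelCase.charAt(i);
      if (Character.isUpperCase(ch)) {
        builder.append('_');
      }
      builder.append(Character.toLowerCase(ch));
    }
    return builder.toString();
  }

  public static void main(String[] args) {
    System.out.println(camelCaseToSnakeCase("myFieldName"));  // prints my_field_name
  }
}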
@@ -1,143 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import java.util.List;
import java.util.Map;

/**
 * Base interface for methods common to {@link Message} and
 * {@link Message.Builder} to provide type equivalency.
 *
 * @author jonp@google.com (Jon Perlow)
 */
public interface MessageOrBuilder extends MessageLiteOrBuilder {

  // (From MessageLite, re-declared here only for return type covariance.)
  @Override
  Message getDefaultInstanceForType();

  /**
   * Returns a list of field paths (e.g. "foo.bar.baz") of required fields
   * which are not set in this message.  You should call
   * {@link MessageLiteOrBuilder#isInitialized()} first to check if there
   * are any missing fields, as that method is likely to be much faster
   * than this one even when the message is fully-initialized.
   */
  List<String> findInitializationErrors();

  /**
   * Returns a comma-delimited list of required fields which are not set
   * in this message object.  You should call
   * {@link MessageLiteOrBuilder#isInitialized()} first to check if there
   * are any missing fields, as that method is likely to be much faster
   * than this one even when the message is fully-initialized.
   */
  String getInitializationErrorString();

  /**
   * Get the message's type's descriptor.  This differs from the
   * {@code getDescriptor()} method of generated message classes in that
   * this method is an abstract method of the {@code Message} interface
   * whereas {@code getDescriptor()} is a static method of a specific class.
   * They return the same thing.
   */
  Descriptors.Descriptor getDescriptorForType();

  /**
   * Returns a collection of all the fields in this message which are set
   * and their corresponding values.  A singular ("required" or "optional")
   * field is set iff hasField() returns true for that field.  A "repeated"
   * field is set iff getRepeatedFieldCount() is greater than zero.  The
   * values are exactly what would be returned by calling
   * {@link #getField(Descriptors.FieldDescriptor)} for each field.  The map
   * is guaranteed to be a sorted map, so iterating over it will return fields
   * in order by field number.
   * <br>
   * If this is for a builder, the returned map may or may not reflect future
   * changes to the builder.  Either way, the returned map is itself
   * unmodifiable.
   */
  Map<Descriptors.FieldDescriptor, Object> getAllFields();

  /**
   * Returns true if the given oneof is set.
   * @throws IllegalArgumentException if
   *           {@code oneof.getContainingType() != getDescriptorForType()}.
   */
  boolean hasOneof(Descriptors.OneofDescriptor oneof);

  /**
   * Obtains the FieldDescriptor if the given oneof is set.  Returns null
   * if no field is set.
   */
  Descriptors.FieldDescriptor getOneofFieldDescriptor(
      Descriptors.OneofDescriptor oneof);

  /**
   * Returns true if the given field is set.  This is exactly equivalent to
   * calling the generated "has" accessor method corresponding to the field.
   * @throws IllegalArgumentException The field is a repeated field, or
   *           {@code field.getContainingType() != getDescriptorForType()}.
   */
  boolean hasField(Descriptors.FieldDescriptor field);

  /**
   * Obtains the value of the given field, or the default value if it is
   * not set.  For primitive fields, the boxed primitive value is returned.
   * For enum fields, the EnumValueDescriptor for the value is returned. For
   * embedded message fields, the sub-message is returned.  For repeated
   * fields, a java.util.List is returned.
   */
  Object getField(Descriptors.FieldDescriptor field);

  /**
   * Gets the number of elements of a repeated field.  This is exactly
   * equivalent to calling the generated "Count" accessor method corresponding
   * to the field.
   * @throws IllegalArgumentException The field is not a repeated field, or
   *           {@code field.getContainingType() != getDescriptorForType()}.
   */
  int getRepeatedFieldCount(Descriptors.FieldDescriptor field);

  /**
   * Gets an element of a repeated field.  For primitive fields, the boxed
   * primitive value is returned.  For enum fields, the EnumValueDescriptor
   * for the value is returned.  For embedded message fields, the sub-message
   * is returned.
   * @throws IllegalArgumentException The field is not a repeated field, or
   *           {@code field.getContainingType() != getDescriptorForType()}.
   */
  Object getRepeatedField(Descriptors.FieldDescriptor field, int index);

  /** Get the {@link UnknownFieldSet} for this message. */
  UnknownFieldSet getUnknownFields();
}
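The getAllFields() contract above (only set fields appear, sorted by field number) is what makes generic, descriptor-driven traversal possible without knowing the message type at compile time. A hedged sketch of such a traversal (the FieldDump helper is illustrative, not part of the library):

import java.util.Map;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder;

public final class FieldDump {
  private FieldDump() {}

  /**
   * Prints each set field of any message or builder.  Because getAllFields()
   * returns a sorted map, output is always in field-number order.
   */
  static void dump(MessageOrBuilder message) {
    for (Map.Entry<Descriptors.FieldDescriptor, Object> entry
        : message.getAllFields().entrySet()) {
      System.out.println(entry.getKey().getFullName() + " = " + entry.getValue());
    }
  }
}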
@@ -1,990 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package org.apache.hadoop.hbase.shaded.com.google.protobuf;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

/**
 * Reflection utility methods shared by both mutable and immutable messages.
 *
 * @author liujisi@google.com (Pherl Liu)
 */
class MessageReflection {

  static void writeMessageTo(
      Message message,
      Map<FieldDescriptor, Object> fields,
      CodedOutputStream output,
      boolean alwaysWriteRequiredFields)
      throws IOException {
    final boolean isMessageSet =
        message.getDescriptorForType().getOptions().getMessageSetWireFormat();
    if (alwaysWriteRequiredFields) {
      fields = new TreeMap<FieldDescriptor, Object>(fields);
      for (final FieldDescriptor field :
          message.getDescriptorForType().getFields()) {
        if (field.isRequired() && !fields.containsKey(field)) {
          fields.put(field, message.getField(field));
        }
      }
    }
    for (final Map.Entry<Descriptors.FieldDescriptor, Object> entry :
        fields.entrySet()) {
      final Descriptors.FieldDescriptor field = entry.getKey();
      final Object value = entry.getValue();
      if (isMessageSet && field.isExtension() &&
          field.getType() == Descriptors.FieldDescriptor.Type.MESSAGE &&
          !field.isRepeated()) {
        output.writeMessageSetExtension(field.getNumber(), (Message) value);
      } else {
        FieldSet.writeField(field, value, output);
      }
    }

    final UnknownFieldSet unknownFields = message.getUnknownFields();
    if (isMessageSet) {
      unknownFields.writeAsMessageSetTo(output);
    } else {
      unknownFields.writeTo(output);
    }
  }

  static int getSerializedSize(
      Message message,
      Map<FieldDescriptor, Object> fields) {
    int size = 0;
    final boolean isMessageSet =
        message.getDescriptorForType().getOptions().getMessageSetWireFormat();

    for (final Map.Entry<Descriptors.FieldDescriptor, Object> entry :
        fields.entrySet()) {
      final Descriptors.FieldDescriptor field = entry.getKey();
      final Object value = entry.getValue();
      if (isMessageSet && field.isExtension() &&
          field.getType() == Descriptors.FieldDescriptor.Type.MESSAGE &&
          !field.isRepeated()) {
        size += CodedOutputStream.computeMessageSetExtensionSize(
            field.getNumber(), (Message) value);
      } else {
        size += FieldSet.computeFieldSize(field, value);
      }
    }

    final UnknownFieldSet unknownFields = message.getUnknownFields();
    if (isMessageSet) {
      size += unknownFields.getSerializedSizeAsMessageSet();
    } else {
      size += unknownFields.getSerializedSize();
    }
    return size;
  }

  static String delimitWithCommas(List<String> parts) {
    StringBuilder result = new StringBuilder();
    for (String part : parts) {
      if (result.length() > 0) {
        result.append(", ");
      }
      result.append(part);
    }
    return result.toString();
  }

  @SuppressWarnings("unchecked")
  static boolean isInitialized(MessageOrBuilder message) {
    // Check that all required fields are present.
    for (final Descriptors.FieldDescriptor field : message
        .getDescriptorForType()
        .getFields()) {
      if (field.isRequired()) {
        if (!message.hasField(field)) {
          return false;
        }
      }
    }

    // Check that embedded messages are initialized.
    for (final Map.Entry<Descriptors.FieldDescriptor, Object> entry :
        message.getAllFields().entrySet()) {
      final Descriptors.FieldDescriptor field = entry.getKey();
      if (field.getJavaType() == Descriptors.FieldDescriptor.JavaType.MESSAGE) {
        if (field.isRepeated()) {
          for (final Message element
              : (List<Message>) entry.getValue()) {
            if (!element.isInitialized()) {
              return false;
            }
          }
        } else {
          if (!((Message) entry.getValue()).isInitialized()) {
            return false;
          }
        }
      }
    }

    return true;
  }

  private static String subMessagePrefix(final String prefix,
                                         final Descriptors.FieldDescriptor field,
                                         final int index) {
    final StringBuilder result = new StringBuilder(prefix);
    if (field.isExtension()) {
      result.append('(')
          .append(field.getFullName())
          .append(')');
    } else {
      result.append(field.getName());
    }
    if (index != -1) {
      result.append('[')
          .append(index)
          .append(']');
    }
    result.append('.');
    return result.toString();
  }

  private static void findMissingFields(final MessageOrBuilder message,
                                        final String prefix,
                                        final List<String> results) {
    for (final Descriptors.FieldDescriptor field :
        message.getDescriptorForType().getFields()) {
      if (field.isRequired() && !message.hasField(field)) {
        results.add(prefix + field.getName());
      }
    }

    for (final Map.Entry<Descriptors.FieldDescriptor, Object> entry :
        message.getAllFields().entrySet()) {
      final Descriptors.FieldDescriptor field = entry.getKey();
      final Object value = entry.getValue();

      if (field.getJavaType() == Descriptors.FieldDescriptor.JavaType.MESSAGE) {
        if (field.isRepeated()) {
          int i = 0;
          for (final Object element : (List) value) {
            findMissingFields((MessageOrBuilder) element,
                              subMessagePrefix(prefix, field, i++),
                              results);
          }
        } else {
          if (message.hasField(field)) {
            findMissingFields((MessageOrBuilder) value,
                              subMessagePrefix(prefix, field, -1),
                              results);
          }
        }
      }
    }
  }

  /**
   * Populates {@code this.missingFields} with the full "path" of each missing
   * required field in the given message.
   */
  static List<String> findMissingFields(
      final MessageOrBuilder message) {
    final List<String> results = new ArrayList<String>();
    findMissingFields(message, "", results);
    return results;
  }

  static interface MergeTarget {
    enum ContainerType {
      MESSAGE, EXTENSION_SET
    }

    /**
     * Returns the descriptor for the target.
     */
    public Descriptors.Descriptor getDescriptorForType();

    public ContainerType getContainerType();

    public ExtensionRegistry.ExtensionInfo findExtensionByName(
        ExtensionRegistry registry, String name);

    public ExtensionRegistry.ExtensionInfo findExtensionByNumber(
        ExtensionRegistry registry, Descriptors.Descriptor containingType,
        int fieldNumber);

    /**
     * Obtains the value of the given field, or the default value if it is not
     * set.  For primitive fields, the boxed primitive value is returned.  For
     * enum fields, the EnumValueDescriptor for the value is returned.  For
     * embedded message fields, the sub-message is returned.  For repeated
     * fields, a java.util.List is returned.
     */
    public Object getField(Descriptors.FieldDescriptor field);

    /**
     * Returns true if the given field is set.  This is exactly equivalent to
     * calling the generated "has" accessor method corresponding to the field.
     *
     * @throws IllegalArgumentException The field is a repeated field, or {@code
     * field.getContainingType() != getDescriptorForType()}.
     */
    boolean hasField(Descriptors.FieldDescriptor field);

    /**
     * Sets a field to the given value.  The value must be of the correct type
     * for this field, i.e. the same type that
     * {@link Message#getField(Descriptors.FieldDescriptor)}
     * would return.
     */
    MergeTarget setField(Descriptors.FieldDescriptor field, Object value);

    /**
     * Clears the field.  This is exactly equivalent to calling the generated
     * "clear" accessor method corresponding to the field.
     */
    MergeTarget clearField(Descriptors.FieldDescriptor field);

    /**
     * Sets an element of a repeated field to the given value.  The value must
     * be of the correct type for this field, i.e. the same type that {@link
     * Message#getRepeatedField(Descriptors.FieldDescriptor, int)} would return.
     *
     * @throws IllegalArgumentException The field is not a repeated field, or
     * {@code field.getContainingType() != getDescriptorForType()}.
     */
    MergeTarget setRepeatedField(Descriptors.FieldDescriptor field,
        int index, Object value);

    /**
     * Like {@code setRepeatedField}, but appends the value as a new element.
     *
     * @throws IllegalArgumentException The field is not a repeated field, or
     * {@code field.getContainingType() != getDescriptorForType()}.
     */
    MergeTarget addRepeatedField(Descriptors.FieldDescriptor field,
        Object value);

    /**
     * Returns true if the given oneof is set.
     *
     * @throws IllegalArgumentException if
     * {@code oneof.getContainingType() != getDescriptorForType()}.
     */
    boolean hasOneof(Descriptors.OneofDescriptor oneof);

    /**
     * Clears the oneof.  This is exactly equivalent to calling the generated
     * "clear" accessor method corresponding to the oneof.
     */
    MergeTarget clearOneof(Descriptors.OneofDescriptor oneof);

    /**
     * Obtains the FieldDescriptor if the given oneof is set.  Returns null
     * if no field is set.
     */
    Descriptors.FieldDescriptor getOneofFieldDescriptor(Descriptors.OneofDescriptor oneof);

    /**
     * Parse the input stream into a sub field group defined based on either
     * FieldDescriptor or the default instance.
     */
    Object parseGroup(CodedInputStream input, ExtensionRegistryLite registry,
        Descriptors.FieldDescriptor descriptor, Message defaultInstance)
        throws IOException;

    /**
     * Parse the input stream into a sub field message defined based on either
     * FieldDescriptor or the default instance.
     */
    Object parseMessage(CodedInputStream input, ExtensionRegistryLite registry,
        Descriptors.FieldDescriptor descriptor, Message defaultInstance)
        throws IOException;

    /**
     * Parse from a ByteString into a sub field message defined based on either
     * FieldDescriptor or the default instance.  There isn't a varint indicating
     * the length of the message at the beginning of the input ByteString.
     */
    Object parseMessageFromBytes(
        ByteString bytes, ExtensionRegistryLite registry,
        Descriptors.FieldDescriptor descriptor, Message defaultInstance)
        throws IOException;

    /**
     * Returns the UTF8 validation level for the field.
     */
    WireFormat.Utf8Validation getUtf8Validation(Descriptors.FieldDescriptor
        descriptor);

    /**
     * Returns a new merge target for a sub-field.  When defaultInstance is
     * provided, it indicates the descriptor is for an extension type, and
     * implementations should create a new instance from the defaultInstance
     * prototype directly.
     */
    MergeTarget newMergeTargetForField(
        Descriptors.FieldDescriptor descriptor,
        Message defaultInstance);

    /**
     * Finishes the merge and returns the underlying object.
     */
    Object finish();
  }

  static class BuilderAdapter implements MergeTarget {

    private final Message.Builder builder;

    @Override
    public Descriptors.Descriptor getDescriptorForType() {
      return builder.getDescriptorForType();
    }

    public BuilderAdapter(Message.Builder builder) {
      this.builder = builder;
    }

    @Override
    public Object getField(Descriptors.FieldDescriptor field) {
      return builder.getField(field);
    }

    @Override
    public boolean hasField(Descriptors.FieldDescriptor field) {
      return builder.hasField(field);
    }

    @Override
    public MergeTarget setField(Descriptors.FieldDescriptor field, Object value) {
      builder.setField(field, value);
      return this;
    }

    @Override
    public MergeTarget clearField(Descriptors.FieldDescriptor field) {
      builder.clearField(field);
      return this;
    }

    @Override
    public MergeTarget setRepeatedField(
        Descriptors.FieldDescriptor field, int index, Object value) {
      builder.setRepeatedField(field, index, value);
      return this;
    }

    @Override
    public MergeTarget addRepeatedField(Descriptors.FieldDescriptor field, Object value) {
      builder.addRepeatedField(field, value);
      return this;
    }

    @Override
    public boolean hasOneof(Descriptors.OneofDescriptor oneof) {
      return builder.hasOneof(oneof);
    }

    @Override
    public MergeTarget clearOneof(Descriptors.OneofDescriptor oneof) {
      builder.clearOneof(oneof);
      return this;
    }

    @Override
    public Descriptors.FieldDescriptor getOneofFieldDescriptor(Descriptors.OneofDescriptor oneof) {
      return builder.getOneofFieldDescriptor(oneof);
    }

    @Override
    public ContainerType getContainerType() {
      return ContainerType.MESSAGE;
    }

    @Override
    public ExtensionRegistry.ExtensionInfo findExtensionByName(
        ExtensionRegistry registry, String name) {
      return registry.findImmutableExtensionByName(name);
    }

    @Override
    public ExtensionRegistry.ExtensionInfo findExtensionByNumber(
        ExtensionRegistry registry, Descriptors.Descriptor containingType, int fieldNumber) {
      return registry.findImmutableExtensionByNumber(containingType,
          fieldNumber);
    }

    @Override
    public Object parseGroup(
        CodedInputStream input,
        ExtensionRegistryLite extensionRegistry,
        Descriptors.FieldDescriptor field,
        Message defaultInstance)
        throws IOException {
      Message.Builder subBuilder;
      // When the default instance is not null, the field is an extension field.
      if (defaultInstance != null) {
        subBuilder = defaultInstance.newBuilderForType();
      } else {
        subBuilder = builder.newBuilderForField(field);
      }
      if (!field.isRepeated()) {
        Message originalMessage = (Message) getField(field);
        if (originalMessage != null) {
          subBuilder.mergeFrom(originalMessage);
        }
      }
      input.readGroup(field.getNumber(), subBuilder, extensionRegistry);
      return subBuilder.buildPartial();
    }

    @Override
    public Object parseMessage(
        CodedInputStream input,
        ExtensionRegistryLite extensionRegistry,
        Descriptors.FieldDescriptor field,
        Message defaultInstance)
        throws IOException {
      Message.Builder subBuilder;
      // When the default instance is not null, the field is an extension field.
      if (defaultInstance != null) {
        subBuilder = defaultInstance.newBuilderForType();
      } else {
        subBuilder = builder.newBuilderForField(field);
      }
      if (!field.isRepeated()) {
        Message originalMessage = (Message) getField(field);
        if (originalMessage != null) {
          subBuilder.mergeFrom(originalMessage);
        }
      }
      input.readMessage(subBuilder, extensionRegistry);
      return subBuilder.buildPartial();
    }

    @Override
    public Object parseMessageFromBytes(
        ByteString bytes,
        ExtensionRegistryLite extensionRegistry,
        Descriptors.FieldDescriptor field,
        Message defaultInstance)
        throws IOException {
      Message.Builder subBuilder;
      // When the default instance is not null, the field is an extension field.
      if (defaultInstance != null) {
        subBuilder = defaultInstance.newBuilderForType();
      } else {
        subBuilder = builder.newBuilderForField(field);
      }
      if (!field.isRepeated()) {
        Message originalMessage = (Message) getField(field);
        if (originalMessage != null) {
          subBuilder.mergeFrom(originalMessage);
        }
      }
      subBuilder.mergeFrom(bytes, extensionRegistry);
      return subBuilder.buildPartial();
    }

    @Override
    public MergeTarget newMergeTargetForField(
        Descriptors.FieldDescriptor field, Message defaultInstance) {
      if (defaultInstance != null) {
        return new BuilderAdapter(
            defaultInstance.newBuilderForType());
      } else {
        return new BuilderAdapter(builder.newBuilderForField(field));
      }
    }

    @Override
    public WireFormat.Utf8Validation getUtf8Validation(Descriptors.FieldDescriptor descriptor) {
      if (descriptor.needsUtf8Check()) {
        return WireFormat.Utf8Validation.STRICT;
      }
      // TODO(liujisi): support lazy strings for repeated fields.
      if (!descriptor.isRepeated()
          && builder instanceof GeneratedMessage.Builder) {
        return WireFormat.Utf8Validation.LAZY;
      }
      return WireFormat.Utf8Validation.LOOSE;
    }

    @Override
    public Object finish() {
      return builder.buildPartial();
    }
  }


  static class ExtensionAdapter implements MergeTarget {

    private final FieldSet<Descriptors.FieldDescriptor> extensions;

    ExtensionAdapter(FieldSet<Descriptors.FieldDescriptor> extensions) {
      this.extensions = extensions;
    }

    @Override
    public Descriptors.Descriptor getDescriptorForType() {
      throw new UnsupportedOperationException(
          "getDescriptorForType() called on FieldSet object");
    }

    @Override
    public Object getField(Descriptors.FieldDescriptor field) {
      return extensions.getField(field);
    }

    @Override
    public boolean hasField(Descriptors.FieldDescriptor field) {
      return extensions.hasField(field);
    }

    @Override
    public MergeTarget setField(Descriptors.FieldDescriptor field, Object value) {
      extensions.setField(field, value);
      return this;
    }

    @Override
    public MergeTarget clearField(Descriptors.FieldDescriptor field) {
      extensions.clearField(field);
      return this;
    }

    @Override
    public MergeTarget setRepeatedField(
        Descriptors.FieldDescriptor field, int index, Object value) {
      extensions.setRepeatedField(field, index, value);
      return this;
    }

    @Override
    public MergeTarget addRepeatedField(Descriptors.FieldDescriptor field, Object value) {
      extensions.addRepeatedField(field, value);
      return this;
    }

    @Override
    public boolean hasOneof(Descriptors.OneofDescriptor oneof) {
      return false;
    }

    @Override
    public MergeTarget clearOneof(Descriptors.OneofDescriptor oneof) {
      // Nothing to clear.
      return this;
    }

    @Override
    public Descriptors.FieldDescriptor getOneofFieldDescriptor(Descriptors.OneofDescriptor oneof) {
      return null;
    }

    @Override
    public ContainerType getContainerType() {
      return ContainerType.EXTENSION_SET;
    }

    @Override
    public ExtensionRegistry.ExtensionInfo findExtensionByName(
        ExtensionRegistry registry, String name) {
      return registry.findImmutableExtensionByName(name);
    }

    @Override
    public ExtensionRegistry.ExtensionInfo findExtensionByNumber(
        ExtensionRegistry registry, Descriptors.Descriptor containingType, int fieldNumber) {
      return registry.findImmutableExtensionByNumber(containingType,
          fieldNumber);
    }

    @Override
    public Object parseGroup(
        CodedInputStream input,
        ExtensionRegistryLite registry,
        Descriptors.FieldDescriptor field,
        Message defaultInstance)
        throws IOException {
      Message.Builder subBuilder =
          defaultInstance.newBuilderForType();
      if (!field.isRepeated()) {
        Message originalMessage = (Message) getField(field);
        if (originalMessage != null) {
          subBuilder.mergeFrom(originalMessage);
        }
      }
      input.readGroup(field.getNumber(), subBuilder, registry);
      return subBuilder.buildPartial();
    }

    @Override
    public Object parseMessage(
        CodedInputStream input,
        ExtensionRegistryLite registry,
        Descriptors.FieldDescriptor field,
        Message defaultInstance)
        throws IOException {
      Message.Builder subBuilder =
          defaultInstance.newBuilderForType();
      if (!field.isRepeated()) {
        Message originalMessage = (Message) getField(field);
        if (originalMessage != null) {
          subBuilder.mergeFrom(originalMessage);
        }
      }
      input.readMessage(subBuilder, registry);
      return subBuilder.buildPartial();
    }

    @Override
    public Object parseMessageFromBytes(
        ByteString bytes,
        ExtensionRegistryLite registry,
        Descriptors.FieldDescriptor field,
        Message defaultInstance)
        throws IOException {
      Message.Builder subBuilder = defaultInstance.newBuilderForType();
      if (!field.isRepeated()) {
        Message originalMessage = (Message) getField(field);
        if (originalMessage != null) {
          subBuilder.mergeFrom(originalMessage);
        }
      }
      subBuilder.mergeFrom(bytes, registry);
      return subBuilder.buildPartial();
    }

    @Override
    public MergeTarget newMergeTargetForField(
        Descriptors.FieldDescriptor descriptor, Message defaultInstance) {
      throw new UnsupportedOperationException(
          "newMergeTargetForField() called on FieldSet object");
    }

    @Override
    public WireFormat.Utf8Validation getUtf8Validation(Descriptors.FieldDescriptor descriptor) {
      if (descriptor.needsUtf8Check()) {
        return WireFormat.Utf8Validation.STRICT;
      }
      // TODO(liujisi): support lazy strings for ExtensionSet.
      return WireFormat.Utf8Validation.LOOSE;
    }

    @Override
    public Object finish() {
      throw new UnsupportedOperationException(
          "finish() called on FieldSet object");
    }
  }

  /**
   * Parses a single field into MergeTarget.  The target can be Message.Builder,
   * FieldSet or MutableMessage.
   *
   * Package-private because it is used by GeneratedMessage.ExtendableMessage.
   *
   * @param tag The tag, which should have already been read.
   * @return {@code true} unless the tag is an end-group tag.
   */
  static boolean mergeFieldFrom(
      CodedInputStream input,
      UnknownFieldSet.Builder unknownFields,
      ExtensionRegistryLite extensionRegistry,
      Descriptors.Descriptor type,
      MergeTarget target,
      int tag) throws IOException {
    if (type.getOptions().getMessageSetWireFormat() &&
        tag == WireFormat.MESSAGE_SET_ITEM_TAG) {
      mergeMessageSetExtensionFromCodedStream(
          input, unknownFields, extensionRegistry, type, target);
      return true;
    }

    final int wireType = WireFormat.getTagWireType(tag);
    final int fieldNumber = WireFormat.getTagFieldNumber(tag);

    final Descriptors.FieldDescriptor field;
    Message defaultInstance = null;

    if (type.isExtensionNumber(fieldNumber)) {
      // extensionRegistry may be either ExtensionRegistry or
      // ExtensionRegistryLite.  Since the type we are parsing is a full
      // message, only a full ExtensionRegistry could possibly contain
      // extensions of it.  Otherwise we will treat the registry as if it
      // were empty.
      if (extensionRegistry instanceof ExtensionRegistry) {
        final ExtensionRegistry.ExtensionInfo extension =
            target.findExtensionByNumber((ExtensionRegistry) extensionRegistry,
                type, fieldNumber);
        if (extension == null) {
          field = null;
        } else {
          field = extension.descriptor;
          defaultInstance = extension.defaultInstance;
          if (defaultInstance == null &&
              field.getJavaType()
                  == Descriptors.FieldDescriptor.JavaType.MESSAGE) {
            throw new IllegalStateException(
                "Message-typed extension lacked default instance: " +
                    field.getFullName());
          }
        }
      } else {
        field = null;
      }
    } else if (target.getContainerType() == MergeTarget.ContainerType.MESSAGE) {
      field = type.findFieldByNumber(fieldNumber);
    } else {
      field = null;
    }

    boolean unknown = false;
    boolean packed = false;
    if (field == null) {
      unknown = true;  // Unknown field.
    } else if (wireType == FieldSet.getWireFormatForFieldType(
        field.getLiteType(),
        false  /* isPacked */)) {
      packed = false;
    } else if (field.isPackable() &&
        wireType == FieldSet.getWireFormatForFieldType(
            field.getLiteType(),
            true  /* isPacked */)) {
      packed = true;
    } else {
      unknown = true;  // Unknown wire type.
    }

    if (unknown) {  // Unknown field or wrong wire type.  Skip.
      return unknownFields.mergeFieldFrom(tag, input);
    }

    if (packed) {
      final int length = input.readRawVarint32();
      final int limit = input.pushLimit(length);
      if (field.getLiteType() == WireFormat.FieldType.ENUM) {
        while (input.getBytesUntilLimit() > 0) {
          final int rawValue = input.readEnum();
          if (field.getFile().supportsUnknownEnumValue()) {
            target.addRepeatedField(field,
                field.getEnumType().findValueByNumberCreatingIfUnknown(rawValue));
          } else {
            final Object value = field.getEnumType().findValueByNumber(rawValue);
            if (value == null) {
              // If the number isn't recognized as a valid value for this
              // enum, drop it (don't even add it to unknownFields).
              return true;
            }
            target.addRepeatedField(field, value);
          }
        }
      } else {
        while (input.getBytesUntilLimit() > 0) {
          final Object value = WireFormat.readPrimitiveField(
              input, field.getLiteType(), target.getUtf8Validation(field));
          target.addRepeatedField(field, value);
        }
      }
      input.popLimit(limit);
    } else {
      final Object value;
      switch (field.getType()) {
        case GROUP: {
          value = target
              .parseGroup(input, extensionRegistry, field, defaultInstance);
          break;
        }
        case MESSAGE: {
          value = target
              .parseMessage(input, extensionRegistry, field, defaultInstance);
          break;
        }
        case ENUM:
          final int rawValue = input.readEnum();
          if (field.getFile().supportsUnknownEnumValue()) {
            value = field.getEnumType().findValueByNumberCreatingIfUnknown(rawValue);
          } else {
            value = field.getEnumType().findValueByNumber(rawValue);
            // If the number isn't recognized as a valid value for this enum,
            // drop it.
            if (value == null) {
              unknownFields.mergeVarintField(fieldNumber, rawValue);
              return true;
            }
          }
          break;
        default:
          value = WireFormat.readPrimitiveField(
              input, field.getLiteType(), target.getUtf8Validation(field));
          break;
      }

      if (field.isRepeated()) {
        target.addRepeatedField(field, value);
      } else {
        target.setField(field, value);
      }
    }

    return true;
  }

  /**
   * Called by {@code #mergeFieldFrom()} to parse a MessageSet extension into
   * MergeTarget.
   */
  private static void mergeMessageSetExtensionFromCodedStream(
      CodedInputStream input,
      UnknownFieldSet.Builder unknownFields,
      ExtensionRegistryLite extensionRegistry,
      Descriptors.Descriptor type,
      MergeTarget target) throws IOException {

    // The wire format for MessageSet is:
    //   message MessageSet {
    //     repeated group Item = 1 {
    //       required int32 typeId = 2;
    //       required bytes message = 3;
    //     }
    //   }
    // "typeId" is the extension's field number.  The extension can only be
    // a message type, where "message" contains the encoded bytes of that
    // message.
    //
    // In practice, we will probably never see a MessageSet item in which
    // the message appears before the type ID, or where either field does not
    // appear exactly once.  However, in theory such cases are valid, so we
    // should be prepared to accept them.

    int typeId = 0;
    ByteString rawBytes = null;  // If we encounter "message" before "typeId"
    ExtensionRegistry.ExtensionInfo extension = null;

    // Read bytes from input; if we get its type first then parse it eagerly,
    // otherwise we store the raw bytes in a local variable.
    while (true) {
      final int tag = input.readTag();
      if (tag == 0) {
        break;
      }

      if (tag == WireFormat.MESSAGE_SET_TYPE_ID_TAG) {
        typeId = input.readUInt32();
        if (typeId != 0) {
          // extensionRegistry may be either ExtensionRegistry or
          // ExtensionRegistryLite.  Since the type we are parsing is a full
          // message, only a full ExtensionRegistry could possibly contain
          // extensions of it.  Otherwise we will treat the registry as if it
          // were empty.
          if (extensionRegistry instanceof ExtensionRegistry) {
            extension = target.findExtensionByNumber(
                (ExtensionRegistry) extensionRegistry, type, typeId);
          }
        }

      } else if (tag == WireFormat.MESSAGE_SET_MESSAGE_TAG) {
        if (typeId != 0) {
          if (extension != null &&
              ExtensionRegistryLite.isEagerlyParseMessageSets()) {
            // We already know the type, so we can parse directly from the
            // input with no copying.  Hooray!
            eagerlyMergeMessageSetExtension(
                input, extension, extensionRegistry, target);
            rawBytes = null;
            continue;
          }
        }
        // We haven't seen a type ID yet or we want to parse the message lazily.
        rawBytes = input.readBytes();

      } else {  // Unknown tag.  Skip it.
        if (!input.skipField(tag)) {
          break;  // End of group
        }
      }
    }
    input.checkLastTagWas(WireFormat.MESSAGE_SET_ITEM_END_TAG);

    // Process the raw bytes.
    if (rawBytes != null && typeId != 0) {  // Zero is not a valid type ID.
      if (extension != null) {  // We know the type
        mergeMessageSetExtensionFromBytes(
            rawBytes, extension, extensionRegistry, target);
      } else {  // We don't know how to parse this.  Ignore it.
        if (rawBytes != null) {
          unknownFields.mergeField(typeId, UnknownFieldSet.Field.newBuilder()
              .addLengthDelimited(rawBytes).build());
        }
      }
    }
  }

  private static void mergeMessageSetExtensionFromBytes(
      ByteString rawBytes,
      ExtensionRegistry.ExtensionInfo extension,
      ExtensionRegistryLite extensionRegistry,
      MergeTarget target) throws IOException {

    Descriptors.FieldDescriptor field = extension.descriptor;
    boolean hasOriginalValue = target.hasField(field);

    if (hasOriginalValue || ExtensionRegistryLite.isEagerlyParseMessageSets()) {
      // If the field already exists, we just parse the field.
      Object value = target.parseMessageFromBytes(
          rawBytes, extensionRegistry, field, extension.defaultInstance);
      target.setField(field, value);
    } else {
      // Use LazyField to load MessageSet lazily.
      LazyField lazyField = new LazyField(
          extension.defaultInstance, extensionRegistry, rawBytes);
      target.setField(field, lazyField);
    }
  }

  private static void eagerlyMergeMessageSetExtension(
      CodedInputStream input,
      ExtensionRegistry.ExtensionInfo extension,
      ExtensionRegistryLite extensionRegistry,
      MergeTarget target) throws IOException {
    Descriptors.FieldDescriptor field = extension.descriptor;
    Object value = target.parseMessage(input, extensionRegistry, field,
        extension.defaultInstance);
    target.setField(field, value);
  }
}
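The MessageSet wire format described in the comment inside mergeMessageSetExtensionFromCodedStream above (group 1 wrapping a required typeId field 2 and payload bytes field 3) can be reproduced by hand with CodedOutputStream. A hedged sketch, assuming the relocated protobuf runtime; the MessageSetItemWriter class is illustrative, not part of the library:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.WireFormat;

public class MessageSetItemWriter {
  /**
   * Writes one MessageSet item by hand: group 1 wrapping the
   * typeId (field 2) and the payload bytes (field 3), matching the
   * schema sketched in the MessageReflection comment.
   */
  static byte[] writeItem(int typeId, ByteString payload) throws IOException {
    ByteArrayOutputStream raw = new ByteArrayOutputStream();
    CodedOutputStream out = CodedOutputStream.newInstance(raw);
    out.writeTag(1, WireFormat.WIRETYPE_START_GROUP);  // repeated group Item = 1
    out.writeUInt32(2, typeId);                        // required int32 typeId = 2
    out.writeBytes(3, payload);                        // required bytes message = 3
    out.writeTag(1, WireFormat.WIRETYPE_END_GROUP);
    out.flush();
    return raw.toByteArray();
  }
}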
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff