Revert "HBASE-16952 Replace hadoop-maven-plugins with protobuf-maven-plugin for building protos"
Revert premature commit
This reverts commit d0e61b0e9a
.
This commit is contained in:
parent
7b74dd0374
commit
738ff821dd
|
@@ -18,16 +18,25 @@ v2.5.0 of protobuf, it is obtainable from here:
 
  https://github.com/google/protobuf/releases/tag/v2.5.0
 
-You can compile the protoc definitions by invoking maven with profile compile-protobuf
-or passing in compile-protobuf property.
+HBase uses hadoop-maven-plugins:protoc goal to invoke the protoc command. You can
+compile the protoc definitions by invoking maven with profile compile-protobuf or
+passing in compile-protobuf property.
 
-$ mvn compile -Dcompile-protobuf
+mvn compile -Dcompile-protobuf
 or
-$ mvn compile -Pcompile-protobuf
+mvn compile -Pcompile-protobuf
 
-You may also want to define protocExecutable for the protoc binary
+You may also want to define protoc.path for the protoc binary
 
-$ mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc
+mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
 
+If you have added a new proto file, you should add it to the pom.xml file first.
+Other modules also support the maven profile.
 
 After you've done the above, check it in and then check it in (or post a patch
 on a JIRA with your definition file changes and the generated files).
 
+NOTE: The maven protoc plugin is a little broken. It will only source one dir
+at a time. If changes in protobuf files, you will have to first do protoc with
+the src directory pointing back into hbase-protocol module and then rerun it
+after editing the pom to point in here to source .proto files.
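For illustration, a full regeneration run against the restored plugin might look like the following; the module directory and protoc location are hypothetical stand-ins:

$ cd hbase-protocol        # any module whose pom carries the compile-protoc execution
$ mvn compile -Pcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc

Either the -P profile form or the bare -Dcompile-protobuf property form works, since the profile is activated by the mere presence of the property (see the compile-protobuf profile removed further down).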
@@ -189,17 +189,44 @@
     <build>
       <plugins>
         <plugin>
-          <groupId>org.xolstice.maven.plugins</groupId>
-          <artifactId>protobuf-maven-plugin</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-maven-plugins</artifactId>
           <executions>
             <execution>
               <id>compile-protoc</id>
               <phase>generate-sources</phase>
               <goals>
-                <goal>compile</goal>
+                <goal>protoc</goal>
               </goals>
               <configuration>
-                <protoSourceRoot>${basedir}/src/main/protobuf/,${basedir}/../hbase-protocol/src/main/protobuf</protoSourceRoot>
+                <imports>
+                  <!--Reference the .protos files up in hbase-protocol so don't
+                       have to copy them local here-->
+                  <param>${basedir}/src/main/protobuf</param>
+                  <param>${basedir}/../hbase-protocol/src/main/protobuf</param>
+                </imports>
+                <source>
+                  <!--The last directory wins so we need to for now manually run
+                       it once with the hbase-protocol pointer and then after
+                       with pointer to the local protobuf dir-->
+                  <directory>${basedir}/../hbase-protocol/src/main/protobuf</directory>
+                  <directory>${basedir}/src/main/protobuf</directory>
+                  <!-- Unfortunately, Hadoop plugin does not support *.proto.
+                       We have to individually list every proto file here -->
+                  <includes>
+                    <!--CPEPs-->
+                    <include>Aggregate.proto</include>
+                    <include>BulkDelete.proto</include>
+                    <include>DummyRegionServerEndpoint.proto</include>
+                    <include>ColumnAggregationNullResponseProtocol.proto</include>
+                    <include>ColumnAggregationProtocol.proto</include>
+                    <include>ColumnAggregationWithErrorsProtocol.proto</include>
+                    <include>IncrementCounterProcessor.proto</include>
+                    <include>SecureBulkLoad.proto</include>
+                  </includes>
+                </source>
+                <!--<output>${project.build.directory}/generated-sources/java</output>-->
+                <output>${basedir}/src/main/java/</output>
               </configuration>
             </execution>
           </executions>
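The "last directory wins" comment above implies a two-pass workflow; a sketch of what that could look like, with a hand edit of the pom between runs (hypothetical, inferred from the comment):

$ # pass 1: <source> pointing at ../hbase-protocol/src/main/protobuf
$ mvn compile -Pcompile-protobuf
$ # pass 2: edit the pom so <source> points at ./src/main/protobuf, then rerun
$ mvn compile -Pcompile-protobuf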
@@ -65,3 +65,4 @@ Example code.
 Also includes example coprocessor endpoint examples. The protobuf files are at src/main/protobuf.
 See hbase-protocol README.txt for how to generate the example RowCountService Coprocessor
 Endpoint and Aggregator examples.
+
@@ -179,15 +179,30 @@
     <build>
       <plugins>
         <plugin>
-          <groupId>org.xolstice.maven.plugins</groupId>
-          <artifactId>protobuf-maven-plugin</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-maven-plugins</artifactId>
           <executions>
             <execution>
               <id>compile-protoc</id>
               <phase>generate-sources</phase>
               <goals>
-                <goal>compile</goal>
+                <goal>protoc</goal>
               </goals>
+              <configuration>
+                <imports>
+                  <param>${basedir}/src/main/protobuf</param>
+                </imports>
+                <source>
+                  <directory>${basedir}/src/main/protobuf</directory>
+                  <!-- Unfortunately, Hadoop plugin does not support *.proto.
+                       We have to individually list every proto file here -->
+                  <includes>
+                    <include>Examples.proto</include>
+                  </includes>
+                </source>
+                <!--<output>${project.build.directory}/generated-sources/java</output>-->
+                <output>${basedir}/src/main/java/</output>
+              </configuration>
             </execution>
           </executions>
         </plugin>
@@ -1,6 +1,4 @@
 Please read carefully as the 'menu options' have changed.
-What you do in here is not what you do elsewhere to generate
-proto java files.
 
 This module has proto files used by core. These protos
 overlap with protos that are used by coprocessor endpoints
@@ -22,9 +20,26 @@ Finally, this module also includes patches applied on top of
 protobuf to add functionality not yet in protobuf that we
 need now.
 
+The shaded generated java files, including the patched protobuf
+source files are all checked in.
 
 If you make changes to protos, to the protobuf version or to
-the patches you want to apply to protobuf, you must rerun the
-below step and then check in what it generated:
+the patches you want to apply to protobuf, you must rerun this
+step.
 
+First ensure that the appropriate protobuf protoc tool is in
+your $PATH as in:
+
+ $ export PATH=~/bin/protobuf-3.1.0/src:$PATH
+
+.. or pass -Dprotoc.path=PATH_TO_PROTOC when running
+the below mvn commands. NOTE: The protoc that we use internally
+is very likely NOT what is used over in the hbase-protocol
+module (here we'd use a 3.1.0 where in hbase-protocol we'll
+use something older, a 2.5.0). You may need to download protobuf and
+build protoc first.
+
+Run:
+
 $ mvn install -Dcompile-protobuf
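Before running the install, it is worth confirming which protoc the shell will resolve; a quick check, with the build directory above as a hypothetical example:

$ export PATH=~/bin/protobuf-3.1.0/src:$PATH
$ which protoc
$ protoc --version    # expect something like: libprotoc 3.1.0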
@@ -32,20 +47,15 @@ or
 
 $ mvn install -Pcompille-protobuf
 
+to build and trigger the special generate-shaded-classes profile.
 When finished, the content of src/main/java/org/apache/hadoop/hbase/shaded
 will have been updated. Make sure all builds and then carefully
 check in the changes. Files may have been added or removed
 by the steps above.
 
-The protobuf version used internally by hbase differs from what
-is used over in the CPEP hbase-protocol module but in here, the
-mvn takes care of ensuring we have the right protobuf in place so
-you don't have to.
-
 If you have patches for the protobuf, add them to
 src/main/patches directory. They will be applied after
 protobuf is shaded and unbundled into src/main/java.
 
 See the pom.xml under the generate-shaded-classes profile
-for more info on how this step works; it is a little involved
-and a bit messy but all in the name of saving you pain.
+for more info on how this step works.
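Since the regenerated shaded sources are checked in, a sketch of reviewing what the run changed, using ordinary git commands:

$ mvn install -Dcompile-protobuf
$ git status src/main/java/org/apache/hadoop/hbase/shaded
$ git add -A src/main/java/org/apache/hadoop/hbase/shaded   # files may be added or removed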
@@ -40,13 +40,6 @@
     <sources.dir>src/main/java</sources.dir>
   </properties>
   <build>
-    <extensions>
-      <extension>
-        <groupId>kr.motd.maven</groupId>
-        <artifactId>os-maven-plugin</artifactId>
-        <version>1.4.0.Final</version>
-      </extension>
-    </extensions>
     <!--I want to override these in profile so define them
         with variables up here-->
     <sourceDirectory>${sources.dir}</sourceDirectory>
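The extension being dropped here is what supplied ${os.detected.classifier} for the protobuf-maven-plugin's protocArtifact in the next hunk; with it gone, protoc is no longer fetched as a platform-classified Maven artifact. For a sense of what such an artifact looks like, one can pull it by hand — the version and classifier below are illustrative only:

$ mvn dependency:get \
    -Dartifact=com.google.protobuf:protoc:3.1.0:exe:linux-x86_64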
@@ -223,20 +216,58 @@
         </executions>
       </plugin>
       <plugin>
-        <groupId>org.xolstice.maven.plugins</groupId>
-        <artifactId>protobuf-maven-plugin</artifactId>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-maven-plugins</artifactId>
         <configuration>
-          <protocArtifact>com.google.protobuf:protoc:${internal.protobuf.version}:exe:${os.detected.classifier}</protocArtifact>
+          <protocVersion>${internal.protobuf.version}</protocVersion>
         </configuration>
         <executions>
           <execution>
             <id>compile-protoc</id>
             <phase>generate-sources</phase>
             <goals>
-              <goal>compile</goal>
+              <goal>protoc</goal>
             </goals>
             <configuration>
-              <outputDirectory>${sources.dir}</outputDirectory>
+              <imports>
+                <param>${basedir}/src/main/protobuf</param>
+              </imports>
+              <source>
+                <directory>${basedir}/src/main/protobuf</directory>
+                <!-- Unfortunately, Hadoop plugin does not support *.proto.
+                     We have to individually list every proto file here -->
+                <includes>
+                  <include>Admin.proto</include>
+                  <include>Cell.proto</include>
+                  <include>Client.proto</include>
+                  <include>ClusterId.proto</include>
+                  <include>ClusterStatus.proto</include>
+                  <include>Comparator.proto</include>
+                  <include>Encryption.proto</include>
+                  <include>ErrorHandling.proto</include>
+                  <include>FS.proto</include>
+                  <include>Filter.proto</include>
+                  <include>HBase.proto</include>
+                  <include>HFile.proto</include>
+                  <include>LoadBalancer.proto</include>
+                  <include>MapReduce.proto</include>
+                  <include>Master.proto</include>
+                  <include>MasterProcedure.proto</include>
+                  <include>Procedure.proto</include>
+                  <include>Quota.proto</include>
+                  <include>RPC.proto</include>
+                  <include>RegionNormalizer.proto</include>
+                  <include>RegionServerStatus.proto</include>
+                  <include>Snapshot.proto</include>
+                  <include>Tracing.proto</include>
+                  <include>WAL.proto</include>
+                  <include>ZooKeeper.proto</include>
+                  <include>TestProcedure.proto</include>
+                  <include>test.proto</include>
+                  <include>test_rpc_service.proto</include>
+                </includes>
+              </source>
+              <output>${sources.dir}</output>
             </configuration>
           </execution>
         </executions>
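The restored hadoop-maven-plugins protoc goal expects the protoc found on the command line to agree with <protocVersion>; a quick sanity check before building (the grep pattern is just an assumption about the property name shown above):

$ grep internal.protobuf.version pom.xml
$ protoc --version     # should report the matching libprotoc version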
@@ -0,0 +1,28 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import "CellMessage.proto";
+
+package org.apache.hadoop.hbase.shaded.rest.protobuf.generated;
+
+message CellSet {
+  message Row {
+    required bytes key = 1;
+    repeated Cell values = 2;
+  }
+  repeated Row rows = 1;
+}
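Under the profile this file is compiled by the plugin, but the same generation can be sketched by hand; the file name CellSetMessage.proto and the paths are assumptions, since the page lost this hunk's file header:

$ protoc --proto_path=src/main/protobuf \
    --java_out=src/main/java \
    src/main/protobuf/CellSetMessage.proto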
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Defines a protocol to perform multi row transactions.
+ * See BaseRowProcessorEndpoint for the implementation.
+ * See HRegion#processRowsWithLocks() for details.
+ */
+package hbase.pb;
+
+option java_package = "org.apache.hadoop.hbase.shaded.protobuf.generated";
+option java_outer_classname = "RowProcessorProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+message ProcessRequest {
+  required string row_processor_class_name = 1;
+  optional string row_processor_initializer_message_name = 2;
+  optional bytes row_processor_initializer_message = 3;
+  optional uint64 nonce_group = 4;
+  optional uint64 nonce = 5;
+}
+
+message ProcessResponse {
+  required bytes row_processor_result = 1;
+}
+
+service RowProcessorService {
+  rpc Process(ProcessRequest) returns (ProcessResponse);
+}
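Because java_generic_services is true, protoc emits an abstract RowProcessorService stub alongside the messages. A hand-run sketch, with the RowProcessor.proto file name and paths assumed as before:

$ protoc --proto_path=src/main/protobuf \
    --java_out=src/main/java \
    src/main/protobuf/RowProcessor.proto
$ grep -rl "RowProcessorProtos" src/main/java    # locate the generated outer class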
@@ -15,21 +15,23 @@ protobuf, it is obtainable from here:
 
  https://github.com/google/protobuf/releases/tag/v2.5.0
 
-To generate java files from protos run:
+HBase uses hadoop-maven-plugins:protoc goal to invoke the protoc command. You can
+compile the protoc definitions by invoking maven with profile compile-protobuf or
+passing in compile-protobuf property.
 
-$ mvn compile -Dcompile-protobuf
+mvn compile -Dcompile-protobuf
 or
-$ mvn compile -Pcompile-protobuf
+mvn compile -Pcompile-protobuf
 
-You may also want to define protocExecutable for the protoc binary
+You may also want to define protoc.path for the protoc binary
 
-mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc
+mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
 
 If you have added a new proto file, you should add it to the pom.xml file first.
 Other modules also support the maven profile.
 
 NOTE: The protoc used here is probably NOT the same as the hbase-protocol-shaded
-module uses; here we use a more palatable version -- 2.5.0 -- whereas over in
+module uses; here we use a more palatable version -- 2.5.0 -- wherease over in
 the internal hbase-protocol-shaded module, we'd use something newer. Be conscious
 of this when running your protoc being sure to apply the appropriate version
 per module.
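Since the two modules pin different protoc generations, one workable habit is to switch $PATH per module before regenerating; the directories below are hypothetical:

$ export PATH=~/bin/protobuf-2.5.0/src:$PATH   # when working in hbase-protocol
$ protoc --version                             # expect libprotoc 2.5.0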
@@ -159,15 +159,60 @@
     <build>
       <plugins>
        <plugin>
-          <groupId>org.xolstice.maven.plugins</groupId>
-          <artifactId>protobuf-maven-plugin</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-maven-plugins</artifactId>
          <executions>
            <execution>
              <id>compile-protoc</id>
              <phase>generate-sources</phase>
              <goals>
-                <goal>compile</goal>
+                <goal>protoc</goal>
              </goals>
+              <configuration>
+                <imports>
+                  <param>${basedir}/src/main/protobuf</param>
+                </imports>
+                <source>
+                  <directory>${basedir}/src/main/protobuf</directory>
+                  <!-- Unfortunately, Hadoop plugin does not support *.proto.
+                       We have to individually list every proto file here -se-protocol/src/main/protobuf//AccessControl.proto
+                  -->
+                  <includes>
+                    <include>AccessControl.proto</include>
+                    <include>Admin.proto</include>
+                    <include>Authentication.proto</include>
+                    <include>Cell.proto</include>
+                    <include>Client.proto</include>
+                    <include>ClusterId.proto</include>
+                    <include>ClusterStatus.proto</include>
+                    <include>Comparator.proto</include>
+                    <include>Encryption.proto</include>
+                    <include>ErrorHandling.proto</include>
+                    <include>FS.proto</include>
+                    <include>Filter.proto</include>
+                    <include>HBase.proto</include>
+                    <include>HFile.proto</include>
+                    <include>LoadBalancer.proto</include>
+                    <include>MapReduce.proto</include>
+                    <include>MultiRowMutation.proto</include>
+                    <include>Quota.proto</include>
+                    <include>RPC.proto</include>
+                    <include>RowProcessor.proto</include>
+                    <include>Snapshot.proto</include>
+                    <!--These two test proto files are in shaded and non-shaded form
+                         used both sides testing-->
+                    <include>test.proto</include>
+                    <include>test_rpc_service.proto</include>
+                    <include>Tracing.proto</include>
+                    <include>VisibilityLabels.proto</include>
+                    <include>WAL.proto</include>
+                    <include>ZooKeeper.proto</include>
+                    <include>PingProtocol.proto</include>
+                  </includes>
+                </source>
+                <!--<output>${project.build.directory}/generated-sources/java</output>-->
+                <output>${basedir}/src/main/java/</output>
+              </configuration>
            </execution>
          </executions>
        </plugin>
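Because the Hadoop plugin needs every file listed, a new .proto that is not added to the <includes> block is silently skipped. A rough consistency check — the counts are heuristic, since comments and cross-module imports can skew them:

$ ls src/main/protobuf/*.proto | wc -l
$ grep -c '<include>' pom.xml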
@@ -1,530 +0,0 @@
-// Generated by the protocol buffer compiler. DO NOT EDIT!
-// source: TestProcedure.proto
-
-package org.apache.hadoop.hbase.ipc.protobuf.generated;
-
-public final class TestProcedureProtos {
-  private TestProcedureProtos() {}
-  public static void registerAllExtensions(
-      com.google.protobuf.ExtensionRegistry registry) {
-  }
-  public interface TestTableDDLStateDataOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // required string table_name = 1;
-    /**
-     * <code>required string table_name = 1;</code>
-     */
-    boolean hasTableName();
-    /**
-     * <code>required string table_name = 1;</code>
-     */
-    java.lang.String getTableName();
-    /**
-     * <code>required string table_name = 1;</code>
-     */
-    com.google.protobuf.ByteString
-        getTableNameBytes();
-  }
-  /**
-   * Protobuf type {@code TestTableDDLStateData}
-   */
-  public static final class TestTableDDLStateData extends
-      com.google.protobuf.GeneratedMessage
-      implements TestTableDDLStateDataOrBuilder {
-    // Use TestTableDDLStateData.newBuilder() to construct.
-    private TestTableDDLStateData(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
-      super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private TestTableDDLStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final TestTableDDLStateData defaultInstance;
-    public static TestTableDDLStateData getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public TestTableDDLStateData getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private TestTableDDLStateData(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              tableName_ = input.readBytes();
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.Builder.class);
-    }
-
-    public static com.google.protobuf.Parser<TestTableDDLStateData> PARSER =
-        new com.google.protobuf.AbstractParser<TestTableDDLStateData>() {
-      public TestTableDDLStateData parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new TestTableDDLStateData(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<TestTableDDLStateData> getParserForType() {
-      return PARSER;
-    }
-
-    private int bitField0_;
-    // required string table_name = 1;
-    public static final int TABLE_NAME_FIELD_NUMBER = 1;
-    private java.lang.Object tableName_;
-    /**
-     * <code>required string table_name = 1;</code>
-     */
-    public boolean hasTableName() {
-      return ((bitField0_ & 0x00000001) == 0x00000001);
-    }
-    /**
-     * <code>required string table_name = 1;</code>
-     */
-    public java.lang.String getTableName() {
-      java.lang.Object ref = tableName_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
-      } else {
-        com.google.protobuf.ByteString bs =
-            (com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
-          tableName_ = s;
-        }
-        return s;
-      }
-    }
-    /**
-     * <code>required string table_name = 1;</code>
-     */
-    public com.google.protobuf.ByteString
-        getTableNameBytes() {
-      java.lang.Object ref = tableName_;
-      if (ref instanceof java.lang.String) {
-        com.google.protobuf.ByteString b =
-            com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        tableName_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-
-    private void initFields() {
-      tableName_ = "";
-    }
-    private byte memoizedIsInitialized = -1;
-    public final boolean isInitialized() {
-      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
-
-      if (!hasTableName()) {
-        memoizedIsInitialized = 0;
-        return false;
-      }
-      memoizedIsInitialized = 1;
-      return true;
-    }
-
-    public void writeTo(com.google.protobuf.CodedOutputStream output)
-        throws java.io.IOException {
-      getSerializedSize();
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeBytes(1, getTableNameBytes());
-      }
-      getUnknownFields().writeTo(output);
-    }
-
-    private int memoizedSerializedSize = -1;
-    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
-      if (size != -1) return size;
-
-      size = 0;
-      if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        size += com.google.protobuf.CodedOutputStream
-            .computeBytesSize(1, getTableNameBytes());
-      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
-      return size;
-    }
-
-    private static final long serialVersionUID = 0L;
-    @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
-        com.google.protobuf.ByteString data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
-        com.google.protobuf.ByteString data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(byte[] data)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
-        byte[] data,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      return PARSER.parseFrom(data, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseDelimitedFrom(java.io.InputStream input)
-        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseDelimitedFrom(
-        java.io.InputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
-        com.google.protobuf.CodedInputStream input)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input);
-    }
-    public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
-    }
-
-    public static Builder newBuilder() { return Builder.create(); }
-    public Builder newBuilderForType() { return newBuilder(); }
-    public static Builder newBuilder(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData prototype) {
-      return newBuilder().mergeFrom(prototype);
-    }
-    public Builder toBuilder() { return newBuilder(this); }
-
-    @java.lang.Override
-    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-      Builder builder = new Builder(parent);
-      return builder;
-    }
-    /**
-     * Protobuf type {@code TestTableDDLStateData}
-     */
-    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-        implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateDataOrBuilder {
-      public static final com.google.protobuf.Descriptors.Descriptor
-          getDescriptor() {
-        return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor;
-      }
-
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-          internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.Builder.class);
-      }
-
-      // Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.newBuilder()
-      private Builder() {
-        maybeForceBuilderInitialization();
-      }
-
-      private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
-        super(parent);
-        maybeForceBuilderInitialization();
-      }
-      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
-        }
-      }
-      private static Builder create() {
-        return new Builder();
-      }
-
-      public Builder clear() {
-        super.clear();
-        tableName_ = "";
-        bitField0_ = (bitField0_ & ~0x00000001);
-        return this;
-      }
-
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
-      public com.google.protobuf.Descriptors.Descriptor
-          getDescriptorForType() {
-        return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor;
-      }
-
-      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData getDefaultInstanceForType() {
-        return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.getDefaultInstance();
-      }
-
-      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData build() {
-        org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData result = buildPartial();
-        if (!result.isInitialized()) {
-          throw newUninitializedMessageException(result);
-        }
-        return result;
-      }
-
-      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData buildPartial() {
-        org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData(this);
-        int from_bitField0_ = bitField0_;
-        int to_bitField0_ = 0;
-        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
-          to_bitField0_ |= 0x00000001;
-        }
-        result.tableName_ = tableName_;
-        result.bitField0_ = to_bitField0_;
-        onBuilt();
-        return result;
-      }
-
-      public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) {
-          return mergeFrom((org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData)other);
-        } else {
-          super.mergeFrom(other);
-          return this;
-        }
-      }
-
-      public Builder mergeFrom(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData other) {
-        if (other == org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.getDefaultInstance()) return this;
-        if (other.hasTableName()) {
-          bitField0_ |= 0x00000001;
-          tableName_ = other.tableName_;
-          onChanged();
-        }
-        this.mergeUnknownFields(other.getUnknownFields());
-        return this;
-      }
-
-      public final boolean isInitialized() {
-        if (!hasTableName()) {
-
-          return false;
-        }
-        return true;
-      }
-
-      public Builder mergeFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws java.io.IOException {
-        org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) e.getUnfinishedMessage();
-          throw e;
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
-          }
-        }
-        return this;
-      }
-      private int bitField0_;
-
-      // required string table_name = 1;
-      private java.lang.Object tableName_ = "";
-      /**
-       * <code>required string table_name = 1;</code>
-       */
-      public boolean hasTableName() {
-        return ((bitField0_ & 0x00000001) == 0x00000001);
-      }
-      /**
-       * <code>required string table_name = 1;</code>
-       */
-      public java.lang.String getTableName() {
-        java.lang.Object ref = tableName_;
-        if (!(ref instanceof java.lang.String)) {
-          java.lang.String s = ((com.google.protobuf.ByteString) ref)
-              .toStringUtf8();
-          tableName_ = s;
-          return s;
-        } else {
-          return (java.lang.String) ref;
-        }
-      }
-      /**
-       * <code>required string table_name = 1;</code>
-       */
-      public com.google.protobuf.ByteString
-          getTableNameBytes() {
-        java.lang.Object ref = tableName_;
-        if (ref instanceof String) {
-          com.google.protobuf.ByteString b =
-              com.google.protobuf.ByteString.copyFromUtf8(
-                  (java.lang.String) ref);
-          tableName_ = b;
-          return b;
-        } else {
-          return (com.google.protobuf.ByteString) ref;
-        }
-      }
-      /**
-       * <code>required string table_name = 1;</code>
-       */
-      public Builder setTableName(
-          java.lang.String value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000001;
-        tableName_ = value;
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>required string table_name = 1;</code>
-       */
-      public Builder clearTableName() {
-        bitField0_ = (bitField0_ & ~0x00000001);
-        tableName_ = getDefaultInstance().getTableName();
-        onChanged();
-        return this;
-      }
-      /**
-       * <code>required string table_name = 1;</code>
-       */
-      public Builder setTableNameBytes(
-          com.google.protobuf.ByteString value) {
-        if (value == null) {
-          throw new NullPointerException();
-        }
-        bitField0_ |= 0x00000001;
-        tableName_ = value;
-        onChanged();
-        return this;
-      }
-
-      // @@protoc_insertion_point(builder_scope:TestTableDDLStateData)
-    }
-
-    static {
-      defaultInstance = new TestTableDDLStateData(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:TestTableDDLStateData)
-  }
-
-  private static com.google.protobuf.Descriptors.Descriptor
-      internal_static_TestTableDDLStateData_descriptor;
-  private static
-      com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_TestTableDDLStateData_fieldAccessorTable;
-
-  public static com.google.protobuf.Descriptors.FileDescriptor
-      getDescriptor() {
-    return descriptor;
-  }
-  private static com.google.protobuf.Descriptors.FileDescriptor
-      descriptor;
-  static {
-    java.lang.String[] descriptorData = {
-      "\n\023TestProcedure.proto\"+\n\025TestTableDDLSta" +
-      "teData\022\022\n\ntable_name\030\001 \002(\tBH\n.org.apache" +
-      ".hadoop.hbase.ipc.protobuf.generatedB\023Te" +
-      "stProcedureProtos\210\001\001"
-    };
-    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
-        new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
-      public com.google.protobuf.ExtensionRegistry assignDescriptors(
-          com.google.protobuf.Descriptors.FileDescriptor root) {
-        descriptor = root;
-        internal_static_TestTableDDLStateData_descriptor =
-            getDescriptor().getMessageTypes().get(0);
-        internal_static_TestTableDDLStateData_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-                internal_static_TestTableDDLStateData_descriptor,
-                new java.lang.String[] { "TableName", });
-        return null;
-      }
-    };
-    com.google.protobuf.Descriptors.FileDescriptor
-        .internalBuildGeneratedFileFrom(descriptorData,
-            new com.google.protobuf.Descriptors.FileDescriptor[] {
-            }, assigner);
-  }
-
-  // @@protoc_insertion_point(outer_class_scope)
-}
@@ -1,26 +0,0 @@
-This maven module has the protobuf definition files used by REST.
-
-The produced java classes are generated and then checked in. The reasoning is
-that they change infrequently.
-
-To regenerate the classes after making definition file changes, in here or over
-in hbase-protocol since we source some of those protos in this package, ensure
-first that the protobuf protoc tool is in your $PATH. You may need to download
-it and build it first; it is part of the protobuf package. For example, if using
-v2.5.0 of protobuf, it is obtainable from here:
-
- https://github.com/google/protobuf/releases/tag/v2.5.0
-
-You can compile the protoc definitions by invoking maven with profile compile-protobuf
-or passing in compile-protobuf property.
-
-$ mvn compile -Dcompile-protobuf
-or
-$ mvn compile -Pcompile-protobuf
-
-You may also want to define protocExecutable for the protoc binary
-
-$ mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc
-
-After you've done the above, check it in and then check it in (or post a patch
-on a JIRA with your definition file changes and the generated files).
@@ -353,15 +353,40 @@
     <build>
       <plugins>
        <plugin>
-          <groupId>org.xolstice.maven.plugins</groupId>
-          <artifactId>protobuf-maven-plugin</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-maven-plugins</artifactId>
          <executions>
            <execution>
              <id>compile-protoc</id>
              <phase>generate-sources</phase>
              <goals>
-                <goal>compile</goal>
+                <goal>protoc</goal>
              </goals>
+              <configuration>
+                <imports>
+                  <param>${basedir}/src/main/protobuf</param>
+                </imports>
+                <source>
+                  <directory>${basedir}/src/main/protobuf</directory>
+                  <!-- Unfortunately, Hadoop plugin does not support *.proto.
+                       We have to individually list every proto file here -->
+                  <includes>
+                    <include>CellMessage.proto</include>
+                    <include>CellSetMessage.proto</include>
+                    <include>ColumnSchemaMessage.proto</include>
+                    <include>NamespacePropertiesMessage.proto</include>
+                    <include>NamespacesMessage.proto</include>
+                    <include>ScannerMessage.proto</include>
+                    <include>StorageClusterStatusMessage.proto</include>
+                    <include>TableInfoMessage.proto</include>
+                    <include>TableListMessage.proto</include>
+                    <include>TableSchemaMessage.proto</include>
+                    <include>VersionMessage.proto</include>
+                  </includes>
+                </source>
+                <!--<output>${project.build.directory}/generated-sources/java</output>-->
+                <output>${basedir}/src/main/java/</output>
+              </configuration>
            </execution>
          </executions>
        </plugin>
@@ -1,26 +1,30 @@
-This maven module has the protobuf definition files used by regionserver grouping.
+These are the protobuf definition files used by the region grouping feature.
+The protos here are used by the region grouping coprocessor endpoints.
 
-The produced java classes are generated and then checked in. The reasoning is
-that they change infrequently.
+The produced java classes are generated and then checked in. The reasoning
+is that they change infrequently and it saves generating anew on each build.
 
-To regenerate the classes after making definition file changes, in here or over
-in hbase-protocol since we source some of those protos in this package, ensure
-first that the protobuf protoc tool is in your $PATH. You may need to download
-it and build it first; it is part of the protobuf package. For example, if using
-v2.5.0 of protobuf, it is obtainable from here:
+To regenerate the classes after making definition file changes, ensure first that
+the protobuf protoc tool is in your $PATH. You may need to download it and build
+it first; its part of the protobuf package. For example, if using v2.5.0 of
+protobuf, it is obtainable from here:
 
  https://github.com/google/protobuf/releases/tag/v2.5.0
 
-You can compile the protoc definitions by invoking maven with profile compile-protobuf
-or passing in compile-protobuf property.
+HBase uses hadoop-maven-plugins:protoc goal to invoke the protoc command. You can
+compile the protoc definitions by invoking maven with profile compile-protobuf or
+passing in compile-protobuf property.
 
-$ mvn compile -Dcompile-protobuf
+mvn compile -Dcompile-protobuf
 or
-$ mvn compile -Pcompile-protobuf
+mvn compile -Pcompile-protobuf
 
-You may also want to define protocExecutable for the protoc binary
+You may also want to define protoc.path for the protoc binary
 
-$ mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc
+mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
 
+If you have added a new proto file, you should add it to the pom.xml file first.
+Other modules also support the maven profile.
 
 After you've done the above, check it in and then check it in (or post a patch
 on a JIRA with your definition file changes and the generated files).
@@ -226,34 +226,6 @@
       <surefire.skipFirstPart>true</surefire.skipFirstPart>
     </properties>
   </profile>
-  <profile>
-    <id>compile-protobuf</id>
-    <activation>
-      <property>
-        <name>compile-protobuf</name>
-      </property>
-    </activation>
-    <build>
-      <plugins>
-        <plugin>
-          <groupId>org.xolstice.maven.plugins</groupId>
-          <artifactId>protobuf-maven-plugin</artifactId>
-          <executions>
-            <execution>
-              <id>compile-protoc</id>
-              <phase>generate-sources</phase>
-              <goals>
-                <goal>compile</goal>
-              </goals>
-              <configuration>
-                <protoSourceRoot>${basedir}/src/main/protobuf/,${basedir}/../hbase-protocol/src/main/protobuf</protoSourceRoot>
-              </configuration>
-            </execution>
-          </executions>
-        </plugin>
-      </plugins>
-    </build>
-  </profile>
 
 <!-- profile against Hadoop 2.x: This is the default. -->
 <profile>
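The deleted profile activates on the mere presence of the compile-protobuf property, which is why `mvn compile -Dcompile-protobuf` (no value needed) and `-Pcompile-protobuf` are interchangeable in the READMEs. One way to confirm which profiles a given property turns on, using the standard maven-help-plugin:

$ mvn help:active-profiles -Dcompile-protobuf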
@@ -11,16 +11,25 @@ v2.5.0 of protobuf, it is obtainable from here:
 
  https://github.com/google/protobuf/releases/tag/v2.5.0
 
-You can compile the protoc definitions by invoking maven with profile compile-protobuf
-or passing in compile-protobuf property.
+HBase uses hadoop-maven-plugins:protoc goal to invoke the protoc command. You can
+compile the protoc definitions by invoking maven with profile compile-protobuf or
+passing in compile-protobuf property.
 
-$ mvn compile -Dcompile-protobuf
+mvn compile -Dcompile-protobuf
 or
-$ mvn compile -Pcompile-protobuf
+mvn compile -Pcompile-protobuf
 
-You may also want to define protocExecutable for the protoc binary
+You may also want to define protoc.path for the protoc binary
 
-$ mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc
+mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
 
+If you have added a new proto file, you should add it to the pom.xml file first.
+Other modules also support the maven profile.
 
 After you've done the above, check it in and then check it in (or post a patch
 on a JIRA with your definition file changes and the generated files).
 
+NOTE: The maven protoc plugin is a little broken. It will only source one dir
+at a time. If changes in protobuf files, you will have to first do protoc with
+the src directory pointing back into hbase-protocol module and then rerun it
+after editing the pom to point in here to source .proto files.
@@ -716,15 +716,30 @@
     <build>
       <plugins>
        <plugin>
-          <groupId>org.xolstice.maven.plugins</groupId>
-          <artifactId>protobuf-maven-plugin</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-maven-plugins</artifactId>
          <executions>
            <execution>
              <id>compile-protoc</id>
              <phase>generate-sources</phase>
              <goals>
-                <goal>compile</goal>
+                <goal>protoc</goal>
              </goals>
+              <configuration>
+                <imports>
+                  <param>${basedir}/src/main/protobuf</param>
+                </imports>
+                <source>
+                  <directory>${basedir}/src/main/protobuf</directory>
+                  <!-- Unfortunately, Hadoop plugin does not support *.proto.
+                       We have to individually list every proto file here -->
+                  <includes>
+                    <include>SparkFilter.proto</include>
+                  </includes>
+                </source>
+                <!--<output>${project.build.directory}/generated-sources/java</output>-->
+                <output>${basedir}/src/main/java/</output>
+              </configuration>
            </execution>
          </executions>
        </plugin>
pom.xml (12 changes)
@@ -811,13 +811,12 @@
       </configuration>
     </plugin>
     <plugin>
-      <groupId>org.xolstice.maven.plugins</groupId>
-      <artifactId>protobuf-maven-plugin</artifactId>
-      <version>${protobuf.plugin.version}</version>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-maven-plugins</artifactId>
+      <version>${hadoop-two.version}</version>
       <configuration>
-        <protoSourceRoot>${basedir}/src/main/protobuf/</protoSourceRoot>
-        <outputDirectory>${basedir}/src/main/java/</outputDirectory>
-        <clearOutputDirectory>false</clearOutputDirectory>
+        <protocVersion>${protobuf.version}</protocVersion>
+        <protocCommand>${protoc.path}</protocCommand>
       </configuration>
     </plugin>
     <plugin>
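This pluginManagement block is where the -Dprotoc.path property from the module READMEs lands: it is passed through to the plugin's <protocCommand>, so an absolute path overrides whichever protoc the $PATH lookup would otherwise find. For example, with a hypothetical binary location:

$ mvn compile -Dcompile-protobuf -Dprotoc.path=/usr/local/bin/protoc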
@@ -1216,7 +1215,6 @@
     <log4j.version>1.2.17</log4j.version>
     <mockito-all.version>1.10.8</mockito-all.version>
     <protobuf.version>2.5.0</protobuf.version>
-    <protobuf.plugin.version>0.5.0</protobuf.plugin.version>
     <thrift.path>thrift</thrift.path>
     <thrift.version>0.9.3</thrift.version>
     <zookeeper.version>3.4.8</zookeeper.version>