HADOOP-16598. Backport "HADOOP-16558 [COMMON+HDFS] use protobuf-maven-plugin to generate protobuf classes" to all active branches

Signed-off-by: stack <stack@apache.org>
Author: Duo Zhang, 2019-09-27 09:59:29 +08:00 (committed by stack)
parent 4d274f60bc
commit 07f59c023c
45 changed files with 147 additions and 171 deletions
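
In short: this change removes the per-module hadoop-maven-plugins protoc/test-protoc executions and drives protobuf code generation through org.xolstice.maven.plugins:protobuf-maven-plugin. The plugin version, protoc artifact (resolved per platform via the kr.motd.maven os-maven-plugin extension), source roots and output directories are declared once in hadoop-project/pom.xml under pluginManagement with skip=true; a module that actually has .proto sources opts in by overriding skip=false. The sketch below illustrates that opt-in pattern; it mirrors the pom.xml changes in this diff rather than reproducing any single file verbatim, and the extra proto path shown is just an example of importing hadoop-common's .proto files.

<!-- Sketch of a module-level opt-in (illustrative; version, protocArtifact and
     output directories are inherited from pluginManagement in hadoop-project/pom.xml). -->
<plugin>
  <groupId>org.xolstice.maven.plugins</groupId>
  <artifactId>protobuf-maven-plugin</artifactId>
  <executions>
    <execution>
      <id>src-compile-protoc</id>
      <configuration>
        <!-- generation is skipped by default; turn it on for this module -->
        <skip>false</skip>
        <!-- only needed when this module imports .proto files from another module -->
        <additionalProtoPathElements>
          <additionalProtoPathElement>
            ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
          </additionalProtoPathElement>
        </additionalProtoPathElements>
      </configuration>
    </execution>
  </executions>
</plugin>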

View File

@@ -351,6 +351,20 @@
       </resource>
     </resources>
     <plugins>
+      <plugin>
+        <groupId>org.xolstice.maven.plugins</groupId>
+        <artifactId>protobuf-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>src-compile-protoc</id>
+            <configuration><skip>false</skip></configuration>
+          </execution>
+          <execution>
+            <id>src-test-compile-protoc</id>
+            <configuration><skip>false</skip></configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-maven-plugins</artifactId>
@@ -371,58 +385,6 @@
             </source>
           </configuration>
         </execution>
-        <execution>
-          <id>compile-protoc</id>
-          <goals>
-            <goal>protoc</goal>
-          </goals>
-          <configuration>
-            <protocVersion>${protobuf.version}</protocVersion>
-            <protocCommand>${protoc.path}</protocCommand>
-            <imports>
-              <param>${basedir}/src/main/proto</param>
-            </imports>
-            <source>
-              <directory>${basedir}/src/main/proto</directory>
-              <includes>
-                <include>HAServiceProtocol.proto</include>
-                <include>IpcConnectionContext.proto</include>
-                <include>ProtocolInfo.proto</include>
-                <include>RpcHeader.proto</include>
-                <include>ZKFCProtocol.proto</include>
-                <include>ProtobufRpcEngine.proto</include>
-                <include>Security.proto</include>
-                <include>GetUserMappingsProtocol.proto</include>
-                <include>TraceAdmin.proto</include>
-                <include>RefreshAuthorizationPolicyProtocol.proto</include>
-                <include>RefreshUserMappingsProtocol.proto</include>
-                <include>RefreshCallQueueProtocol.proto</include>
-                <include>GenericRefreshProtocol.proto</include>
-                <include>FSProtos.proto</include>
-              </includes>
-            </source>
-          </configuration>
-        </execution>
-        <execution>
-          <id>compile-test-protoc</id>
-          <goals>
-            <goal>test-protoc</goal>
-          </goals>
-          <configuration>
-            <protocVersion>${protobuf.version}</protocVersion>
-            <protocCommand>${protoc.path}</protocCommand>
-            <imports>
-              <param>${basedir}/src/test/proto</param>
-            </imports>
-            <source>
-              <directory>${basedir}/src/test/proto</directory>
-              <includes>
-                <include>test.proto</include>
-                <include>test_rpc_service.proto</include>
-              </includes>
-            </source>
-          </configuration>
-        </execution>
         <execution>
           <id>resource-gz</id>
           <phase>generate-resources</phase>

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.fs";
 option java_outer_classname = "FSProtos";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.proto";
 option java_outer_classname = "GenericRefreshProtocolProtos";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.tools.proto";
 option java_outer_classname = "GetUserMappingsProtocolProtos";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ha.proto";
 option java_outer_classname = "HAServiceProtocolProtos";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "IpcConnectionContextProtos";
 option java_generate_equals_and_hash = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 /**
  * These are the messages used by Hadoop RPC for the Rpc Engine Protocol Buffer
  * to marshal the request and response in the RPC layer.

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "ProtocolInfoProtos";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.security.proto";
 option java_outer_classname = "RefreshAuthorizationPolicyProtocolProtos";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.proto";
 option java_outer_classname = "RefreshCallQueueProtocolProtos";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.security.proto";
 option java_outer_classname = "RefreshUserMappingsProtocolProtos";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "RpcHeaderProtos";
 option java_generate_equals_and_hash = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.security.proto";
 option java_outer_classname = "SecurityProtos";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.tracing";
 option java_outer_classname = "TraceAdminPB";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ha.proto";
 option java_outer_classname = "ZKFCProtocolProtos";
 option java_generic_services = true;

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "TestProtos";
 option java_generate_equals_and_hash = true;

View File

@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+syntax = "proto2";
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "TestRpcServiceProtos";
 option java_generic_services = true;

View File

@@ -131,36 +131,18 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
         </configuration>
       </plugin>
       <plugin>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-maven-plugins</artifactId>
+        <groupId>org.xolstice.maven.plugins</groupId>
+        <artifactId>protobuf-maven-plugin</artifactId>
         <executions>
           <execution>
-            <id>compile-protoc</id>
-            <goals>
-              <goal>protoc</goal>
-            </goals>
+            <id>src-compile-protoc</id>
             <configuration>
-              <protocVersion>${protobuf.version}</protocVersion>
-              <protocCommand>${protoc.path}</protocCommand>
-              <imports>
-                <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
-                <param>${basedir}/src/main/proto</param>
-              </imports>
-              <source>
-                <directory>${basedir}/src/main/proto</directory>
-                <includes>
-                  <include>ClientDatanodeProtocol.proto</include>
-                  <include>ClientNamenodeProtocol.proto</include>
-                  <include>acl.proto</include>
-                  <include>xattr.proto</include>
-                  <include>datatransfer.proto</include>
-                  <include>hdfs.proto</include>
-                  <include>encryption.proto</include>
-                  <include>inotify.proto</include>
-                  <include>erasurecoding.proto</include>
-                  <include>ReconfigurationProtocol.proto</include>
-                </includes>
-              </source>
+              <skip>false</skip>
+              <additionalProtoPathElements>
+                <additionalProtoPathElement>
+                  ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
+                </additionalProtoPathElement>
+              </additionalProtoPathElements>
             </configuration>
           </execution>
         </executions>

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax="proto2";
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax="proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "ClientNamenodeProtocolProtos";
 option java_generic_services = true;

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax="proto2";
 // This file contains protocol buffers that are used to reconfigure NameNode
 // and DataNode by HDFS admin.

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax="proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "AclProtos";
 option java_generate_equals_and_hash = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax="proto2";
 // This file contains protocol buffers that are used to transfer data
 // to and from the datanode, as well as between datanodes.

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax="proto2";
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax="proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "ErasureCodingProtos";
 option java_generate_equals_and_hash = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax="proto2";
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax="proto2";
 // This file contains protocol buffers used to communicate edits to clients
 // as part of the inotify system.

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax="proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "XAttrProtos";
 option java_generate_equals_and_hash = true;

View File

@@ -174,29 +174,21 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
         </executions>
       </plugin>
       <plugin>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-maven-plugins</artifactId>
+        <groupId>org.xolstice.maven.plugins</groupId>
+        <artifactId>protobuf-maven-plugin</artifactId>
        <executions>
          <execution>
-            <id>compile-protoc</id>
-            <goals>
-              <goal>protoc</goal>
-            </goals>
+            <id>src-compile-protoc</id>
            <configuration>
-              <protocVersion>${protobuf.version}</protocVersion>
-              <protocCommand>${protoc.path}</protocCommand>
-              <imports>
-                <param>${basedir}/../hadoop-hdfs-client/src/main/proto</param>
-                <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
-                <param>${basedir}/src/main/proto</param>
-              </imports>
-              <source>
-                <directory>${basedir}/src/main/proto</directory>
-                <includes>
-                  <include>FederationProtocol.proto</include>
-                  <include>RouterProtocol.proto</include>
-                </includes>
-              </source>
+              <skip>false</skip>
+              <additionalProtoPathElements>
+                <additionalProtoPathElement>
+                  ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
+                </additionalProtoPathElement>
+                <additionalProtoPathElement>
+                  ${basedir}/../hadoop-hdfs-client/src/main/proto
+                </additionalProtoPathElement>
+              </additionalProtoPathElements>
            </configuration>
          </execution>
        </executions>

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.federation.protocol.proto";
 option java_outer_classname = "HdfsServerFederationProtos";
 option java_generic_services = true;

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "RouterProtocolProtos";
 option java_generic_services = true;

View File

@@ -310,42 +310,30 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
        </execution>
      </executions>
    </plugin>
+    <plugin>
+      <groupId>org.xolstice.maven.plugins</groupId>
+      <artifactId>protobuf-maven-plugin</artifactId>
+      <executions>
+        <execution>
+          <id>src-compile-protoc</id>
+          <configuration>
+            <skip>false</skip>
+            <additionalProtoPathElements>
+              <additionalProtoPathElement>
+                ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
+              </additionalProtoPathElement>
+              <additionalProtoPathElement>
+                ${basedir}/../hadoop-hdfs-client/src/main/proto
+              </additionalProtoPathElement>
+            </additionalProtoPathElements>
+          </configuration>
+        </execution>
+      </executions>
+    </plugin>
    <plugin>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-maven-plugins</artifactId>
      <executions>
-        <execution>
-          <id>compile-protoc</id>
-          <goals>
-            <goal>protoc</goal>
-          </goals>
-          <configuration>
-            <protocVersion>${protobuf.version}</protocVersion>
-            <protocCommand>${protoc.path}</protocCommand>
-            <imports>
-              <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
-              <param>${basedir}/../hadoop-hdfs-client/src/main/proto</param>
-              <param>${basedir}/src/main/proto</param>
-            </imports>
-            <source>
-              <directory>${basedir}/src/main/proto</directory>
-              <includes>
-                <include>HdfsServer.proto</include>
-                <include>DatanodeProtocol.proto</include>
-                <include>DatanodeLifelineProtocol.proto</include>
-                <include>HAZKInfo.proto</include>
-                <include>InterDatanodeProtocol.proto</include>
-                <include>JournalProtocol.proto</include>
-                <include>NamenodeProtocol.proto</include>
-                <include>QJournalProtocol.proto</include>
-                <include>editlog.proto</include>
-                <include>fsimage.proto</include>
-                <include>AliasMapProtocol.proto</include>
-                <include>InterQJournalProtocol.proto</include>
-              </includes>
-            </source>
-          </configuration>
-        </execution>
        <execution>
          <id>resource-gz</id>
          <phase>generate-resources</phase>

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "AliasMapProtocolProtos";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "DatanodeLifelineProtocolProtos";
 option java_generic_services = true;

View File

@@ -24,7 +24,7 @@
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "DatanodeProtocolProtos";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.server.namenode.ha.proto";
 option java_outer_classname = "HAZKInfoProtos";
 package hadoop.hdfs;

View File

@@ -24,7 +24,7 @@
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "HdfsServerProtos";

View File

@@ -24,7 +24,7 @@
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "InterDatanodeProtocolProtos";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.qjournal.protocol";
 option java_outer_classname = "InterQJournalProtocolProtos";
 option java_generic_services = true;

View File

@@ -24,7 +24,7 @@
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "JournalProtocolProtos";
 option java_generic_services = true;

View File

@@ -24,7 +24,7 @@
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "NamenodeProtocolProtos";
 option java_generic_services = true;

View File

@@ -21,7 +21,7 @@
  * Please see http://wiki.apache.org/hadoop/Compatibility
  * for what changes are allowed for a *stable* .proto interface.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.qjournal.protocol";
 option java_outer_classname = "QJournalProtocolProtos";
 option java_generic_services = true;

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "EditLogProtos";
 option java_generate_equals_and_hash = true;

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+syntax = "proto2";
 option java_package = "org.apache.hadoop.hdfs.server.namenode";
 option java_outer_classname = "FsImageProto";

View File

@@ -82,6 +82,10 @@
     <protobuf.version>2.5.0</protobuf.version>
     <protoc.path>${env.HADOOP_PROTOC_PATH}</protoc.path>
+    <!-- Maven protoc compiler -->
+    <protobuf-maven-plugin.version>0.5.1</protobuf-maven-plugin.version>
+    <os-maven-plugin.version>1.5.0.Final</os-maven-plugin.version>
     <zookeeper.version>3.4.13</zookeeper.version>
     <curator.version>2.13.0</curator.version>
     <findbugs.version>3.0.0</findbugs.version>
@@ -1529,9 +1533,56 @@
          <artifactId>hadoop-maven-plugins</artifactId>
          <version>${project.version}</version>
        </plugin>
+        <plugin>
+          <groupId>org.xolstice.maven.plugins</groupId>
+          <artifactId>protobuf-maven-plugin</artifactId>
+          <version>${protobuf-maven-plugin.version}</version>
+          <extensions>true</extensions>
+          <configuration>
+            <protocArtifact>
+              com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}
+            </protocArtifact>
+            <attachProtoSources>false</attachProtoSources>
+          </configuration>
+          <executions>
+            <execution>
+              <id>src-compile-protoc</id>
+              <phase>generate-sources</phase>
+              <goals>
+                <goal>compile</goal>
+              </goals>
+              <configuration>
+                <includeDependenciesInDescriptorSet>false</includeDependenciesInDescriptorSet>
+                <protoSourceRoot>${basedir}/src/main/proto</protoSourceRoot>
+                <outputDirectory>${project.build.directory}/generated-sources/java</outputDirectory>
+                <clearOutputDirectory>false</clearOutputDirectory>
+                <skip>true</skip>
+              </configuration>
+            </execution>
+            <execution>
+              <id>src-test-compile-protoc</id>
+              <phase>generate-test-sources</phase>
+              <goals>
+                <goal>test-compile</goal>
+              </goals>
+              <configuration>
+                <protoTestSourceRoot>${basedir}/src/test/proto</protoTestSourceRoot>
+                <outputDirectory>${project.build.directory}/generated-test-sources/java</outputDirectory>
+                <clearOutputDirectory>false</clearOutputDirectory>
+                <skip>true</skip>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
      </plugins>
    </pluginManagement>
+    <extensions>
+      <extension>
+        <groupId>kr.motd.maven</groupId>
+        <artifactId>os-maven-plugin</artifactId>
+        <version>${os-maven-plugin.version}</version>
+      </extension>
+    </extensions>
    <plugins>
      <plugin>
        <artifactId>maven-clean-plugin</artifactId>