HDFS-4909. Avoid protocol buffer RPC namespace clashes (cmccabe)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1588091 13f79535-47bb-0310-9956-ffa450edef68
Colin McCabe 2014-04-16 22:58:40 +00:00
parent 28d62bf75b
commit 9a0dec60ff
5 changed files with 12 additions and 73 deletions
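
Background for this change, as a hedged illustration (the file and message names below are made up, not taken from the HDFS sources): protoc treats all top-level names declared in the same protobuf package as one namespace, so two .proto files that both use package hadoop.hdfs and declare the same message or service name cannot be compiled in a single protoc run. Giving each RPC protocol its own sub-package, as the hunks below do, removes that collision.

// clash_a.proto (hypothetical)
package hadoop.hdfs;

message GetReportRequestProto {
}

// clash_b.proto (hypothetical). Compiling clash_a.proto and clash_b.proto
// together fails with an error along the lines of:
//   "hadoop.hdfs.GetReportRequestProto" is already defined in file "clash_a.proto"
// Renaming one of the packages (e.g. to hadoop.hdfs.datanode) makes the two
// fully-qualified names distinct, so both files can go through one protoc run.
package hadoop.hdfs;

message GetReportRequestProto {
  required string blockPoolId = 1;
}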

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -357,6 +357,8 @@ Release 2.5.0 - UNRELEASED
     HDFS-5409. TestOfflineEditsViewer#testStored fails on Windows due to CRLF
     line endings in editsStored.xml from git checkout. (cnauroth)
 
+    HDFS-4909. Avoid protocol buffer RPC namespace clashes. (cmccabe)
+
 Release 2.4.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

hadoop-hdfs-project/hadoop-hdfs/pom.xml

@@ -430,81 +430,18 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<source>
<directory>${basedir}/src/main/proto</directory>
<includes>
<include>ClientDatanodeProtocol.proto</include>
<include>ClientNamenodeProtocol.proto</include>
<include>DatanodeProtocol.proto</include>
<include>HAZKInfo.proto</include>
<include>InterDatanodeProtocol.proto</include>
<include>JournalProtocol.proto</include>
<include>datatransfer.proto</include>
<include>hdfs.proto</include>
</includes>
</source>
<output>${project.build.directory}/generated-sources/java</output>
</configuration>
</execution>
<execution>
<id>compile-protoc-datanode</id>
<phase>generate-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<protocVersion>${protobuf.version}</protocVersion>
<protocCommand>${protoc.path}</protocCommand>
<imports>
<param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
<param>${basedir}/src/main/proto</param>
</imports>
<source>
<directory>${basedir}/src/main/proto</directory>
<includes>
<include>ClientDatanodeProtocol.proto</include>
<include>DatanodeProtocol.proto</include>
<include>fsimage.proto</include>
</includes>
</source>
<output>${project.build.directory}/generated-sources/java</output>
</configuration>
</execution>
<execution>
<id>compile-protoc-namenode</id>
<phase>generate-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<protocVersion>${protobuf.version}</protocVersion>
<protocCommand>${protoc.path}</protocCommand>
<imports>
<param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
<param>${basedir}/src/main/proto</param>
</imports>
<source>
<directory>${basedir}/src/main/proto</directory>
<includes>
<include>ClientNamenodeProtocol.proto</include>
<include>NamenodeProtocol.proto</include>
<include>acl.proto</include>
</includes>
</source>
<output>${project.build.directory}/generated-sources/java</output>
</configuration>
</execution>
<execution>
<id>compile-protoc-qjournal</id>
<phase>generate-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<protocVersion>${protobuf.version}</protocVersion>
<protocCommand>${protoc.path}</protocCommand>
<imports>
<param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
<param>${basedir}/src/main/proto</param>
</imports>
<source>
<directory>${basedir}/src/main/proto</directory>
<includes>
<include>QJournalProtocol.proto</include>
<include>acl.proto</include>
<include>datatransfer.proto</include>
<include>fsimage.proto</include>
<include>hdfs.proto</include>
</includes>
</source>
<output>${project.build.directory}/generated-sources/java</output>
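
With every protocol in its own protobuf package, the separate compile-protoc-datanode, compile-protoc-namenode and compile-protoc-qjournal executions above become unnecessary, and all of the .proto files can be handled by a single execution. The sketch below shows roughly what such a consolidated execution looks like; the execution id and the exact include list are assumptions for illustration, not copied from the committed pom.xml.

<execution>
  <id>compile-protoc</id> <!-- assumed id for the remaining execution -->
  <phase>generate-sources</phase>
  <goals>
    <goal>protoc</goal>
  </goals>
  <configuration>
    <protocVersion>${protobuf.version}</protocVersion>
    <protocCommand>${protoc.path}</protocCommand>
    <imports>
      <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
      <param>${basedir}/src/main/proto</param>
    </imports>
    <source>
      <directory>${basedir}/src/main/proto</directory>
      <includes>
        <!-- illustrative: the union of the lists compiled separately above -->
        <include>ClientDatanodeProtocol.proto</include>
        <include>ClientNamenodeProtocol.proto</include>
        <include>DatanodeProtocol.proto</include>
        <include>HAZKInfo.proto</include>
        <include>InterDatanodeProtocol.proto</include>
        <include>JournalProtocol.proto</include>
        <include>NamenodeProtocol.proto</include>
        <include>QJournalProtocol.proto</include>
        <include>acl.proto</include>
        <include>datatransfer.proto</include>
        <include>fsimage.proto</include>
        <include>hdfs.proto</include>
      </includes>
    </source>
    <output>${project.build.directory}/generated-sources/java</output>
  </configuration>
</execution>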

hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeProtocol.proto

@@ -29,7 +29,7 @@ option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "DatanodeProtocolProtos";
 option java_generic_services = true;
 option java_generate_equals_and_hash = true;
-package hadoop.hdfs;
+package hadoop.hdfs.datanode;
 
 import "HAServiceProtocol.proto";
 import "hdfs.proto";
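
Two points about this rename, sketched below with a hypothetical message (RegisterExampleRequestProto is not a real DatanodeProtocol.proto message): the java_package and java_outer_classname options are unchanged, so the generated Java classes keep their existing names, and because hadoop.hdfs.datanode is nested inside hadoop.hdfs, unqualified references to types defined in hdfs.proto (which stays in package hadoop.hdfs) still resolve outward without further edits.

// Sketch only; RegisterExampleRequestProto is hypothetical.
option java_package = "org.apache.hadoop.hdfs.protocol.proto";
option java_outer_classname = "DatanodeProtocolProtos";
option java_generic_services = true;
option java_generate_equals_and_hash = true;
package hadoop.hdfs.datanode;

import "hdfs.proto";

message RegisterExampleRequestProto {
  // Resolves outward to hadoop.hdfs.DatanodeIDProto from hdfs.proto,
  // so the field type does not need re-qualifying after the package move.
  required DatanodeIDProto datanodeID = 1;
}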

hadoop-hdfs-project/hadoop-hdfs/src/main/proto/NamenodeProtocol.proto

@@ -29,7 +29,7 @@ option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "NamenodeProtocolProtos";
 option java_generic_services = true;
 option java_generate_equals_and_hash = true;
-package hadoop.hdfs;
+package hadoop.hdfs.namenode;
 
 import "hdfs.proto";

hadoop-hdfs-project/hadoop-hdfs/src/main/proto/QJournalProtocol.proto

@@ -26,7 +26,7 @@ option java_package = "org.apache.hadoop.hdfs.qjournal.protocol";
 option java_outer_classname = "QJournalProtocolProtos";
 option java_generic_services = true;
 option java_generate_equals_and_hash = true;
-package hadoop.hdfs;
+package hadoop.hdfs.qjournal;
 
 import "hdfs.proto";