From ad0e862f78d1bc772e3fa313491bac38018ada3f Mon Sep 17 00:00:00 2001
From: Michael Stack
Date: Thu, 27 Oct 2016 13:17:59 -0700
Subject: [PATCH] HBASE-16952 Replace hadoop-maven-plugins with protobuf-maven-plugin for building protos

Rely on the new plugin to do all proto generation. No need for an external
protoc setup anymore. Mvn will do it all for you. Updated all READMEs appropriately.

Signed-off-by: Michael Stack
---
 hbase-endpoint/README.txt | 56 +-
 hbase-endpoint/pom.xml | 35 +-
 hbase-examples/README.txt | 27 +-
 hbase-examples/pom.xml | 21 +-
 hbase-protocol-shaded/README.txt | 33 +-
 hbase-protocol-shaded/pom.xml | 50 +-
 .../src/main/protobuf/CellSetMessage.proto | 28 -
 .../src/main/protobuf/RowProcessor.proto | 45 --
 hbase-protocol/README.txt | 55 +-
 hbase-protocol/pom.xml | 51 +-
 .../generated/TestProcedureProtos.java | 530 ++++++++++++++++++
 hbase-rest/README.txt | 16 +
 hbase-rest/pom.xml | 31 +-
 hbase-rsgroup/README.txt | 38 +-
 hbase-rsgroup/pom.xml | 28 +
 hbase-spark/README.txt | 42 +-
 hbase-spark/pom.xml | 21 +-
 pom.xml | 25 +-
 18 files changed, 712 insertions(+), 420 deletions(-)
 delete mode 100644 hbase-protocol-shaded/src/main/protobuf/CellSetMessage.proto
 delete mode 100644 hbase-protocol-shaded/src/main/protobuf/RowProcessor.proto
 create mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java
 create mode 100644 hbase-rest/README.txt
diff --git a/hbase-endpoint/README.txt b/hbase-endpoint/README.txt index 4deba00604c..861a77681bf 100644
--- a/hbase-endpoint/README.txt +++ b/hbase-endpoint/README.txt @@ -1,42 +1,24 @@
-This maven module has the protobuf definition files used by hbase Coprocessor -Endpoints that ship with hbase core including tests. Coprocessor Endpoints -are meant to be standalone, independent code not reliant on hbase internals. -They define their Service using protobuf. The protobuf version they use can be -distinct from that used by HBase internally since HBase started shading its -protobuf references. Endpoints have no access to the shaded protobuf hbase uses. -They do have access to the content of hbase-protocol but avoid using as much -of this as you can as it is liable to change.
+ON PROTOBUFS +This maven module has protobuf definition files ('.protos') used by hbase +Coprocessor Endpoints that ship with hbase core including tests. Coprocessor +Endpoints are meant to be standalone, independent code not reliant on hbase +internals. They define their Service using protobuf. The protobuf version +they use can be distinct from that used by HBase internally since HBase started +shading its protobuf references. Endpoints have no access to the shaded protobuf +hbase uses. They do have access to the content of hbase-protocol but avoid using +as much of this as you can as it is liable to change.

-The produced java classes are generated and then checked in. The reasoning is -that they change infrequently.
+Generation of java files from protobuf .proto files included here is done apart +from the build. Run the generation whenever you make changes to the .protos files +and then check in the produced java (The reasoning is that change is infrequent +so why pay the price of generating files anew on each build.)

-To regenerate the classes after making definition file changes, in here or over -in hbase-protocol since we source some of those protos in this package, ensure -first that the protobuf protoc tool is in your $PATH. You may need to download -it and build it first; it is part of the protobuf package.
For example, if using -v2.5.0 of protobuf, it is obtainable from here: +To generate java files from protos run: - https://github.com/google/protobuf/releases/tag/v2.5.0 - -HBase uses hadoop-maven-plugins:protoc goal to invoke the protoc command. You can -compile the protoc definitions by invoking maven with profile compile-protobuf or -passing in compile-protobuf property. - -mvn compile -Dcompile-protobuf + $ mvn compile -Dcompile-protobuf or -mvn compile -Pcompile-protobuf + $ mvn compile -Pcompile-protobuf -You may also want to define protoc.path for the protoc binary - -mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc - -If you have added a new proto file, you should add it to the pom.xml file first. -Other modules also support the maven profile. - -After you've done the above, check it in and then check it in (or post a patch -on a JIRA with your definition file changes and the generated files). - -NOTE: The maven protoc plugin is a little broken. It will only source one dir -at a time. If changes in protobuf files, you will have to first do protoc with -the src directory pointing back into hbase-protocol module and then rerun it -after editing the pom to point in here to source .proto files. +After you've done the above, check it and then check in changes (or post a patch +on a JIRA with your definition file changes and the generated files). Be careful +to notice new files and files removed and do appropriate git rm/adds. diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml index de05950de86..16fc2acd334 100644 --- a/hbase-endpoint/pom.xml +++ b/hbase-endpoint/pom.xml @@ -189,44 +189,17 @@ - org.apache.hadoop - hadoop-maven-plugins + org.xolstice.maven.plugins + protobuf-maven-plugin compile-protoc generate-sources - protoc + compile - - - ${basedir}/src/main/protobuf - ${basedir}/../hbase-protocol/src/main/protobuf - - - - ${basedir}/../hbase-protocol/src/main/protobuf - ${basedir}/src/main/protobuf - - - - Aggregate.proto - BulkDelete.proto - DummyRegionServerEndpoint.proto - ColumnAggregationNullResponseProtocol.proto - ColumnAggregationProtocol.proto - ColumnAggregationWithErrorsProtocol.proto - IncrementCounterProcessor.proto - SecureBulkLoad.proto - - - - ${basedir}/src/main/java/ + ${basedir}/src/main/protobuf/,${basedir}/../hbase-protocol/src/main/protobuf diff --git a/hbase-examples/README.txt b/hbase-examples/README.txt index 78051a630ca..c47ed4f7fdc 100644 --- a/hbase-examples/README.txt +++ b/hbase-examples/README.txt @@ -62,7 +62,28 @@ Example code. 2. Execute {make}. 3. Execute {./DemoClient}. -Also includes example coprocessor endpoint examples. The protobuf files are at src/main/protobuf. -See hbase-protocol README.txt for how to generate the example RowCountService Coprocessor -Endpoint and Aggregator examples. +ON PROTOBUFS +This maven module has protobuf definition files ('.protos') used by hbase +Coprocessor Endpoints examples including tests. Coprocessor +Endpoints are meant to be standalone, independent code not reliant on hbase +internals. They define their Service using protobuf. The protobuf version +they use can be distinct from that used by HBase internally since HBase started +shading its protobuf references. Endpoints have no access to the shaded protobuf +hbase uses. They do have access to the content of hbase-protocol -- the +.protos found in here -- but avoid using as much of this as you can as it is +liable to change. +Generation of java files from protobuf .proto files included here is done apart +from the build. 
Run the generation whenever you make changes to the .protos files
+and then check in the produced java (The reasoning is that change is infrequent
+so why pay the price of generating files anew on each build.)
+
+To generate java files from protos run:
+
+ $ mvn compile -Dcompile-protobuf
+or
+ $ mvn compile -Pcompile-protobuf
+
+After you've done the above, check it and then check in changes (or post a patch
+on a JIRA with your definition file changes and the generated files). Be careful
+to notice new files and files removed and do appropriate git rm/adds.
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml index 2238857f09b..9fcb6ce49c0 100644
--- a/hbase-examples/pom.xml +++ b/hbase-examples/pom.xml @@ -179,30 +179,15 @@
- org.apache.hadoop - hadoop-maven-plugins + org.xolstice.maven.plugins + protobuf-maven-plugin
compile-protoc generate-sources - protoc + compile
- - - ${basedir}/src/main/protobuf - - - ${basedir}/src/main/protobuf - - - Examples.proto - - - - ${basedir}/src/main/java/ -
diff --git a/hbase-protocol-shaded/README.txt b/hbase-protocol-shaded/README.txt index b009643528c..afe8829447c 100644
--- a/hbase-protocol-shaded/README.txt +++ b/hbase-protocol-shaded/README.txt @@ -1,4 +1,6 @@
Please read carefully as the 'menu options' have changed.
+What you do in here is not what you do elsewhere to generate
+proto java files.
This module has proto files used by core. These protos overlap with protos that are used by coprocessor endpoints
@@ -20,26 +22,9 @@ Finally, this module also includes patches applied on top of protobuf to add functionality not yet in protobuf that we need now.
-The shaded generated java files, including the patched protobuf -source files are all checked in.
- If you make changes to protos, to the protobuf version or to
-the patches you want to apply to protobuf, you must rerun this -step.
-
-First ensure that the appropriate protobuf protoc tool is in -your $PATH as in:
-
- $ export PATH=~/bin/protobuf-3.1.0/src:$PATH
-
-.. or pass -Dprotoc.path=PATH_TO_PROTOC when running
-the below mvn commands. NOTE: The protoc that we use internally
-is very likely NOT what is used over in the hbase-protocol
-module (here we'd use a 3.1.0 where in hbase-protocol we'll
-use something older, a 2.5.0). You may need to download protobuf and
-build protoc first.
-
-Run:
+the patches you want to apply to protobuf, you must rerun the
+below step and then check in what it generated:

 $ mvn install -Dcompile-protobuf

@@ -47,15 +32,17 @@ or

 $ mvn install -Pcompile-protobuf

-to build and trigger the special generate-shaded-classes profile.
+NOTE: 'install' above, whereas other proto generation only needs 'compile'.
+
When finished, the content of src/main/java/org/apache/hadoop/hbase/shaded will have been updated. Make sure all builds and then carefully check in the changes. Files may have been added or removed by the steps above.

+The protobuf version used internally by hbase differs from what
+is used over in the CPEP hbase-protocol module but mvn takes care
+of ensuring we have the right protobuf in place so you don't have to.
+
If you have patches for the protobuf, add them to src/main/patches directory. They will be applied after protobuf is shaded and unbundled into src/main/java.
-
-See the pom.xml under the generate-shaded-classes profile
-for more info on how this step works.
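For reference, the per-module wiring of the new plugin in the pom.xml hunks above
has roughly the following shape. This is a sketch using protobuf-maven-plugin's
conventional element names rather than the literal patch content; in particular,
additionalProtoPathElements is an assumed parameter name for modules such as
hbase-endpoint and hbase-rsgroup that also need the CPEP .protos kept in
hbase-protocol:

  <plugin>
    <groupId>org.xolstice.maven.plugins</groupId>
    <artifactId>protobuf-maven-plugin</artifactId>
    <executions>
      <execution>
        <id>compile-protoc</id>
        <phase>generate-sources</phase>
        <goals>
          <goal>compile</goal>
        </goals>
        <configuration>
          <!-- Assumed parameter: makes the .protos shipped in hbase-protocol
               visible as imports when compiling this module's .protos. -->
          <additionalProtoPathElements>
            <additionalProtoPathElement>${basedir}/../hbase-protocol/src/main/protobuf</additionalProtoPathElement>
          </additionalProtoPathElements>
        </configuration>
      </execution>
    </executions>
  </plugin>

The hbase-protocol-shaded hunk that follows additionally points the protoc
artifact at ${internal.protobuf.version} and writes the generated java into
${sources.dir} rather than src/main/java.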
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml index 01845ae2fd0..87750faa487 100644 --- a/hbase-protocol-shaded/pom.xml +++ b/hbase-protocol-shaded/pom.xml @@ -216,58 +216,18 @@ - org.apache.hadoop - hadoop-maven-plugins - - ${internal.protobuf.version} - + org.xolstice.maven.plugins + protobuf-maven-plugin compile-protoc generate-sources - protoc + compile - - ${basedir}/src/main/protobuf - - - ${basedir}/src/main/protobuf - - - Admin.proto - Cell.proto - Client.proto - ClusterId.proto - ClusterStatus.proto - Comparator.proto - Encryption.proto - ErrorHandling.proto - FS.proto - Filter.proto - HBase.proto - HFile.proto - LoadBalancer.proto - MapReduce.proto - Master.proto - MasterProcedure.proto - Procedure.proto - Quota.proto - RPC.proto - RegionNormalizer.proto - RegionServerStatus.proto - Snapshot.proto - Tracing.proto - WAL.proto - ZooKeeper.proto - TestProcedure.proto - test.proto - test_rpc_service.proto - - - ${sources.dir} + com.google.protobuf:protoc:${internal.protobuf.version}:exe:${os.detected.classifier} + ${sources.dir} diff --git a/hbase-protocol-shaded/src/main/protobuf/CellSetMessage.proto b/hbase-protocol-shaded/src/main/protobuf/CellSetMessage.proto deleted file mode 100644 index 4c404290455..00000000000 --- a/hbase-protocol-shaded/src/main/protobuf/CellSetMessage.proto +++ /dev/null @@ -1,28 +0,0 @@ -// -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import "CellMessage.proto"; - -package org.apache.hadoop.hbase.shaded.rest.protobuf.generated; - -message CellSet { - message Row { - required bytes key = 1; - repeated Cell values = 2; - } - repeated Row rows = 1; -} diff --git a/hbase-protocol-shaded/src/main/protobuf/RowProcessor.proto b/hbase-protocol-shaded/src/main/protobuf/RowProcessor.proto deleted file mode 100644 index 80fe6065f87..00000000000 --- a/hbase-protocol-shaded/src/main/protobuf/RowProcessor.proto +++ /dev/null @@ -1,45 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * Defines a protocol to perform multi row transactions.
- * See BaseRowProcessorEndpoint for the implementation.
- * See HRegion#processRowsWithLocks() for details.
- */
-package hbase.pb;
-
-option java_package = "org.apache.hadoop.hbase.shaded.protobuf.generated";
-option java_outer_classname = "RowProcessorProtos";
-option java_generic_services = true;
-option java_generate_equals_and_hash = true;
-option optimize_for = SPEED;
-
-message ProcessRequest {
- required string row_processor_class_name = 1;
- optional string row_processor_initializer_message_name = 2;
- optional bytes row_processor_initializer_message = 3;
- optional uint64 nonce_group = 4;
- optional uint64 nonce = 5;
-}
-
-message ProcessResponse {
- required bytes row_processor_result = 1;
-}
-
-service RowProcessorService {
- rpc Process(ProcessRequest) returns (ProcessResponse);
-}
diff --git a/hbase-protocol/README.txt b/hbase-protocol/README.txt index a3e11a26a5f..729bc30891c 100644
--- a/hbase-protocol/README.txt +++ b/hbase-protocol/README.txt @@ -1,40 +1,25 @@
-These are the protobuf definition files used by hbase Coprocessor Endpoints.
-HBase core uses protos found at hbase-protocol-shaded/src/main/protos. The
-protos here are also in hbase-module-shaded though they are not exactly
-the same files (they generate into different location; where to generate
-to is part of the .proto file). Consider whether any changes made belong
-both here and over in hbase-module-shaded.
+ON PROTOBUFS +This maven module has core protobuf definition files ('.protos') used by hbase +Coprocessor Endpoints that ship with hbase core including tests. Coprocessor +Endpoints are meant to be standalone, independent code not reliant on hbase +internals. They define their Service using protobuf. The protobuf version +they use can be distinct from that used by HBase internally since HBase started +shading its protobuf references. Endpoints have no access to the shaded protobuf +hbase uses. They do have access to the content of hbase-protocol -- the +.protos found in here -- but avoid using as much of this as you can as it is +liable to change.

-The produced java classes are generated and then checked in. The reasoning
-is that they change infrequently and it saves generating anew on each build.
+Generation of java files from protobuf .proto files included here is done apart
+from the build. Run the generation whenever you make changes to the .protos files
+and then check in the produced java (The reasoning is that change is infrequent
+so why pay the price of generating files anew on each build.)

-To regenerate the classes after making definition file changes, ensure first that
-the protobuf protoc tool is in your $PATH. You may need to download it and build
-it first; its part of the protobuf package. For example, if using v2.5.0 of
-protobuf, it is obtainable from here:
+To generate java files from protos run:

- https://github.com/google/protobuf/releases/tag/v2.5.0
-
-HBase uses hadoop-maven-plugins:protoc goal to invoke the protoc command. You can
-compile the protoc definitions by invoking maven with profile compile-protobuf or
-passing in compile-protobuf property.
-
-mvn compile -Dcompile-protobuf
+ $ mvn compile -Dcompile-protobuf
or
-mvn compile -Pcompile-protobuf
+ $ mvn compile -Pcompile-protobuf

-You may also want to define protoc.path for the protoc binary
-
-mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
-
-If you have added a new proto file, you should add it to the pom.xml file first.
-Other modules also support the maven profile. - -NOTE: The protoc used here is probably NOT the same as the hbase-protocol-shaded -module uses; here we use a more palatable version -- 2.5.0 -- wherease over in -the internal hbase-protocol-shaded module, we'd use something newer. Be conscious -of this when running your protoc being sure to apply the appropriate version -per module. - -After you've done the above, check it in and then check it in (or post a patch -on a JIRA with your definition file changes and the generated files). +After you've done the above, check it and then check in changes (or post a patch +on a JIRA with your definition file changes and the generated files). Be careful +to notice new files and files removed and do appropriate git rm/adds. diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml index e21617bffcd..a3d99b2fe35 100644 --- a/hbase-protocol/pom.xml +++ b/hbase-protocol/pom.xml @@ -159,60 +159,15 @@ - org.apache.hadoop - hadoop-maven-plugins + org.xolstice.maven.plugins + protobuf-maven-plugin compile-protoc generate-sources - protoc + compile - - - ${basedir}/src/main/protobuf - - - ${basedir}/src/main/protobuf - - - AccessControl.proto - Admin.proto - Authentication.proto - Cell.proto - Client.proto - ClusterId.proto - ClusterStatus.proto - Comparator.proto - Encryption.proto - ErrorHandling.proto - FS.proto - Filter.proto - HBase.proto - HFile.proto - LoadBalancer.proto - MapReduce.proto - MultiRowMutation.proto - Quota.proto - RPC.proto - RowProcessor.proto - Snapshot.proto - - test.proto - test_rpc_service.proto - Tracing.proto - VisibilityLabels.proto - WAL.proto - ZooKeeper.proto - PingProtocol.proto - - - - ${basedir}/src/main/java/ - diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java new file mode 100644 index 00000000000..f06555045f6 --- /dev/null +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java @@ -0,0 +1,530 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: TestProcedure.proto + +package org.apache.hadoop.hbase.ipc.protobuf.generated; + +public final class TestProcedureProtos { + private TestProcedureProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface TestTableDDLStateDataOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string table_name = 1; + /** + * required string table_name = 1; + */ + boolean hasTableName(); + /** + * required string table_name = 1; + */ + java.lang.String getTableName(); + /** + * required string table_name = 1; + */ + com.google.protobuf.ByteString + getTableNameBytes(); + } + /** + * Protobuf type {@code TestTableDDLStateData} + */ + public static final class TestTableDDLStateData extends + com.google.protobuf.GeneratedMessage + implements TestTableDDLStateDataOrBuilder { + // Use TestTableDDLStateData.newBuilder() to construct. 
+ private TestTableDDLStateData(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private TestTableDDLStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final TestTableDDLStateData defaultInstance; + public static TestTableDDLStateData getDefaultInstance() { + return defaultInstance; + } + + public TestTableDDLStateData getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TestTableDDLStateData( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public TestTableDDLStateData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new TestTableDDLStateData(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required string table_name = 1; + public static final int TABLE_NAME_FIELD_NUMBER = 1; + private java.lang.Object tableName_; + /** + * required string table_name = 1; + */ + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string table_name = 1; + */ + public java.lang.String getTableName() { + java.lang.Object ref = tableName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + 
(com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + tableName_ = s; + } + return s; + } + } + /** + * required string table_name = 1; + */ + public com.google.protobuf.ByteString + getTableNameBytes() { + java.lang.Object ref = tableName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + tableName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + tableName_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getTableNameBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getTableNameBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData 
parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code TestTableDDLStateData} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateDataOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor; + } + + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData 
getDefaultInstanceForType() { + return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData build() { + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData buildPartial() { + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) { + return mergeFrom((org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData other) { + if (other == org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.getDefaultInstance()) return this; + if (other.hasTableName()) { + bitField0_ |= 0x00000001; + tableName_ = other.tableName_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string table_name = 1; + private java.lang.Object tableName_ = ""; + /** + * required string table_name = 1; + */ + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string table_name = 1; + */ + public java.lang.String getTableName() { + java.lang.Object ref = tableName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + tableName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string table_name = 1; + */ + public com.google.protobuf.ByteString + getTableNameBytes() { + java.lang.Object ref = tableName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + tableName_ = b; + return b; + } 
else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string table_name = 1; + */ + public Builder setTableName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + /** + * required string table_name = 1; + */ + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + /** + * required string table_name = 1; + */ + public Builder setTableNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:TestTableDDLStateData) + } + + static { + defaultInstance = new TestTableDDLStateData(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:TestTableDDLStateData) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_TestTableDDLStateData_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_TestTableDDLStateData_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\023TestProcedure.proto\"+\n\025TestTableDDLSta" + + "teData\022\022\n\ntable_name\030\001 \002(\tBH\n.org.apache" + + ".hadoop.hbase.ipc.protobuf.generatedB\023Te" + + "stProcedureProtos\210\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_TestTableDDLStateData_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_TestTableDDLStateData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_TestTableDDLStateData_descriptor, + new java.lang.String[] { "TableName", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hbase-rest/README.txt b/hbase-rest/README.txt new file mode 100644 index 00000000000..192feaf36e6 --- /dev/null +++ b/hbase-rest/README.txt @@ -0,0 +1,16 @@ +ON PROTOBUFS +This maven module has core protobuf definition files ('.protos') used by hbase +REST. Generation of java files from protobuf .proto files included here is done apart +from the build. Run the generation whenever you make changes to the .orotos files +and then check in the produced java (The reasoning is that change is infrequent +so why pay the price of generating files anew on each build. + +To generate java files from protos run: + + $ mvn compile -Dcompile-protobuf +or + $ mvn compile -Pcompile-protobuf + +After you've done the above, check it and then check in changes (or post a patch +on a JIRA with your definition file changes and the generated files). 
Be careful +to notice new files and files removed and do appropriate git rm/adds.
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml index 8a270d7d507..53fbad452be 100644
--- a/hbase-rest/pom.xml +++ b/hbase-rest/pom.xml @@ -353,40 +353,15 @@
- org.apache.hadoop - hadoop-maven-plugins + org.xolstice.maven.plugins + protobuf-maven-plugin
compile-protoc generate-sources - protoc + compile
- - - ${basedir}/src/main/protobuf - - - ${basedir}/src/main/protobuf - - - CellMessage.proto - CellSetMessage.proto - ColumnSchemaMessage.proto - NamespacePropertiesMessage.proto - NamespacesMessage.proto - ScannerMessage.proto - StorageClusterStatusMessage.proto - TableInfoMessage.proto - TableListMessage.proto - TableSchemaMessage.proto - VersionMessage.proto - - - - ${basedir}/src/main/java/ -
diff --git a/hbase-rsgroup/README.txt b/hbase-rsgroup/README.txt index 1e247a94527..f2d1306d716 100644
--- a/hbase-rsgroup/README.txt +++ b/hbase-rsgroup/README.txt @@ -1,30 +1,18 @@
-These are the protobuf definition files used by the region grouping feature.
-The protos here are used by the region grouping coprocessor endpoints.
+ON PROTOBUFS +This maven module has core protobuf definition files ('.protos') used by hbase +table grouping Coprocessor Endpoints.

-The produced java classes are generated and then checked in. The reasoning
-is that they change infrequently and it saves generating anew on each build.
+Generation of java files from protobuf .proto files included here is done apart
+from the build. Run the generation whenever you make changes to the .protos files
+and then check in the produced java (The reasoning is that change is infrequent
+so why pay the price of generating files anew on each build.)

-To regenerate the classes after making definition file changes, ensure first that
-the protobuf protoc tool is in your $PATH. You may need to download it and build
-it first; its part of the protobuf package. For example, if using v2.5.0 of
-protobuf, it is obtainable from here:
+To generate java files from protos run:

- https://github.com/google/protobuf/releases/tag/v2.5.0
-
-HBase uses hadoop-maven-plugins:protoc goal to invoke the protoc command. You can
-compile the protoc definitions by invoking maven with profile compile-protobuf or
-passing in compile-protobuf property.
-
-mvn compile -Dcompile-protobuf
+ $ mvn compile -Dcompile-protobuf
or
-mvn compile -Pcompile-protobuf
+ $ mvn compile -Pcompile-protobuf

-You may also want to define protoc.path for the protoc binary
-
-mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
-
-If you have added a new proto file, you should add it to the pom.xml file first.
-Other modules also support the maven profile.
-
-After you've done the above, check it in and then check it in (or post a patch
-on a JIRA with your definition file changes and the generated files).
+After you've done the above, check it and then check in changes (or post a patch
+on a JIRA with your definition file changes and the generated files). Be careful
+to notice new files and files removed and do appropriate git rm/adds.
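The hbase-rsgroup/pom.xml hunk that follows adds the generation step inside a
compile-protobuf profile. A rough sketch of that profile's shape (element layout
assumed, not verbatim from the patch):

  <profile>
    <id>compile-protobuf</id>
    <activation>
      <property>
        <!-- Property-based activation: this is why both
             'mvn compile -Dcompile-protobuf' and 'mvn compile -Pcompile-protobuf'
             trigger proto generation, as the READMEs above say. -->
        <name>compile-protobuf</name>
      </property>
    </activation>
    <build>
      <plugins>
        <plugin>
          <groupId>org.xolstice.maven.plugins</groupId>
          <artifactId>protobuf-maven-plugin</artifactId>
          <!-- compile-protoc execution bound to generate-sources, as sketched earlier. -->
        </plugin>
      </plugins>
    </build>
  </profile>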
diff --git a/hbase-rsgroup/pom.xml b/hbase-rsgroup/pom.xml index 58802cf7ac9..7534a65c769 100644
--- a/hbase-rsgroup/pom.xml +++ b/hbase-rsgroup/pom.xml @@ -226,6 +226,34 @@
true
+ + compile-protobuf + + + compile-protobuf + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + + + compile-protoc + generate-sources + + compile + + + ${basedir}/src/main/protobuf/,${basedir}/../hbase-protocol/src/main/protobuf + + + + + + +
diff --git a/hbase-spark/README.txt b/hbase-spark/README.txt index 5569924fdcd..fe2c09d81b3 100644
--- a/hbase-spark/README.txt +++ b/hbase-spark/README.txt @@ -1,35 +1,19 @@
-This maven module has the protobuf definition files used by spark.
-The produced java classes are generated and then checked in. The reasoning is
-that they change infrequently.
+ON PROTOBUFS +This maven module has core protobuf definition files ('.protos') used by hbase +Spark.

-To regenerate the classes after making definition file changes, in here or over
-in hbase-protocol since we source some of those protos in this package, ensure
-first that the protobuf protoc tool is in your $PATH. You may need to download
-it and build it first; it is part of the protobuf package. For example, if using
-v2.5.0 of protobuf, it is obtainable from here:
+Generation of java files from protobuf .proto files included here is done apart
+from the build. Run the generation whenever you make changes to the .protos files
+and then check in the produced java (The reasoning is that change is infrequent
+so why pay the price of generating files anew on each build.)

- https://github.com/google/protobuf/releases/tag/v2.5.0
+To generate java files from protos run:

-HBase uses hadoop-maven-plugins:protoc goal to invoke the protoc command. You can
-compile the protoc definitions by invoking maven with profile compile-protobuf or
-passing in compile-protobuf property.
-
-mvn compile -Dcompile-protobuf
+ $ mvn compile -Dcompile-protobuf
or
-mvn compile -Pcompile-protobuf
+ $ mvn compile -Pcompile-protobuf

-You may also want to define protoc.path for the protoc binary
-
-mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
-
-If you have added a new proto file, you should add it to the pom.xml file first.
-Other modules also support the maven profile.
-
-After you've done the above, check it in and then check it in (or post a patch
-on a JIRA with your definition file changes and the generated files).
-
-NOTE: The maven protoc plugin is a little broken. It will only source one dir
-at a time. If changes in protobuf files, you will have to first do protoc with
-the src directory pointing back into hbase-protocol module and then rerun it
-after editing the pom to point in here to source .proto files.
+After you've done the above, check it and then check in changes (or post a patch
+on a JIRA with your definition file changes and the generated files). Be careful
+to notice new files and files removed and do appropriate git rm/adds.
diff --git a/hbase-spark/pom.xml b/hbase-spark/pom.xml index a60b384607d..0a96f9c084a 100644 --- a/hbase-spark/pom.xml +++ b/hbase-spark/pom.xml @@ -716,30 +716,15 @@ - org.apache.hadoop - hadoop-maven-plugins + org.xolstice.maven.plugins + protobuf-maven-plugin compile-protoc generate-sources - protoc + compile - - - ${basedir}/src/main/protobuf - - - ${basedir}/src/main/protobuf - - - SparkFilter.proto - - - - ${basedir}/src/main/java/ - diff --git a/pom.xml b/pom.xml index 65b8adbd14b..79488eecb5d 100644 --- a/pom.xml +++ b/pom.xml @@ -474,6 +474,13 @@ + + + kr.motd.maven + os-maven-plugin + 1.4.0.Final + + @@ -811,12 +818,14 @@ - org.apache.hadoop - hadoop-maven-plugins - ${hadoop-two.version} + org.xolstice.maven.plugins + protobuf-maven-plugin + ${protobuf.plugin.version} - ${protobuf.version} - ${protoc.path} + com.google.protobuf:protoc:${external.protobuf.version}:exe:${os.detected.classifier} + ${basedir}/src/main/protobuf/ + ${basedir}/src/main/java/ + false @@ -1214,7 +1223,9 @@ 3.1.0-incubating 1.2.17 1.10.8 - 2.5.0 + + 2.5.0 + 0.5.0 thrift 0.9.3 3.4.8 @@ -1726,7 +1737,7 @@ com.google.protobuf protobuf-java - ${protobuf.version} + ${external.protobuf.version} com.sun.jersey
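Taken together, the root pom.xml changes above amount to roughly the following
defaults. This is a sketch assuming protobuf-maven-plugin's and os-maven-plugin's
standard element names, not the literal patch content:

  <build>
    <extensions>
      <!-- Provides ${os.detected.classifier} so maven can download the right
           protoc binary for the build platform. -->
      <extension>
        <groupId>kr.motd.maven</groupId>
        <artifactId>os-maven-plugin</artifactId>
        <version>1.4.0.Final</version>
      </extension>
    </extensions>
    <pluginManagement>
      <plugins>
        <plugin>
          <groupId>org.xolstice.maven.plugins</groupId>
          <artifactId>protobuf-maven-plugin</artifactId>
          <version>${protobuf.plugin.version}</version>
          <configuration>
            <protocArtifact>com.google.protobuf:protoc:${external.protobuf.version}:exe:${os.detected.classifier}</protocArtifact>
            <protoSourceRoot>${basedir}/src/main/protobuf/</protoSourceRoot>
            <outputDirectory>${basedir}/src/main/java/</outputDirectory>
            <!-- Assumed mapping for the 'false' in the hunk above: generated java
                 is checked in under src/main/java, so the plugin must not wipe
                 that directory before generating. -->
            <clearOutputDirectory>false</clearOutputDirectory>
          </configuration>
        </plugin>
      </plugins>
    </pluginManagement>
  </build>

With protoc fetched as a per-platform maven artifact, there is no longer any need
to have protoc on $PATH or to pass -Dprotoc.path, which is what the README
changes in this patch describe.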