From 738ff821dd092a1206cb39f6a024620df5710256 Mon Sep 17 00:00:00 2001 From: Michael Stack Date: Thu, 27 Oct 2016 13:17:07 -0700 Subject: [PATCH] Revert "HBASE-16952 Replace hadoop-maven-plugins with protobuf-maven-plugin for building protos" Revert premature commit This reverts commit d0e61b0e9ae3e998074834c500a663f9412629bc. --- hbase-endpoint/README.txt | 21 +- hbase-endpoint/pom.xml | 35 +- hbase-examples/README.txt | 1 + hbase-examples/pom.xml | 21 +- hbase-protocol-shaded/README.txt | 32 +- hbase-protocol-shaded/pom.xml | 55 +- .../src/main/protobuf/CellSetMessage.proto | 28 + .../src/main/protobuf/RowProcessor.proto | 45 ++ hbase-protocol/README.txt | 14 +- hbase-protocol/pom.xml | 51 +- .../generated/TestProcedureProtos.java | 530 ------------------ hbase-rest/README.txt | 26 - hbase-rest/pom.xml | 31 +- hbase-rsgroup/README.txt | 32 +- hbase-rsgroup/pom.xml | 28 - hbase-spark/README.txt | 21 +- hbase-spark/pom.xml | 21 +- pom.xml | 12 +- 18 files changed, 342 insertions(+), 662 deletions(-) create mode 100644 hbase-protocol-shaded/src/main/protobuf/CellSetMessage.proto create mode 100644 hbase-protocol-shaded/src/main/protobuf/RowProcessor.proto delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java delete mode 100644 hbase-rest/README.txt diff --git a/hbase-endpoint/README.txt b/hbase-endpoint/README.txt index fac9e055666..4deba00604c 100644 --- a/hbase-endpoint/README.txt +++ b/hbase-endpoint/README.txt @@ -18,16 +18,25 @@ v2.5.0 of protobuf, it is obtainable from here: https://github.com/google/protobuf/releases/tag/v2.5.0 -You can compile the protoc definitions by invoking maven with profile compile-protobuf -or passing in compile-protobuf property. +HBase uses hadoop-maven-plugins:protoc goal to invoke the protoc command. You can +compile the protoc definitions by invoking maven with profile compile-protobuf or +passing in compile-protobuf property. - $ mvn compile -Dcompile-protobuf +mvn compile -Dcompile-protobuf or - $ mvn compile -Pcompile-protobuf +mvn compile -Pcompile-protobuf -You may also want to define protocExecutable for the protoc binary +You may also want to define protoc.path for the protoc binary - $ mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc +mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc + +If you have added a new proto file, you should add it to the pom.xml file first. +Other modules also support the maven profile. After you've done the above, check it in (or post a patch on a JIRA with your definition file changes and the generated files). + +NOTE: The maven protoc plugin is a little broken. It will only source one dir +at a time. If the protobuf files change, you will have to first run protoc with +the src directory pointing back into the hbase-protocol module and then rerun it +after editing the pom to point in here to source .proto files.
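The protos in this module back coprocessor endpoints, and the checked-in generated classes are what client code programs against. A minimal sketch of the calling pattern, using the RowCountService example endpoint from hbase-examples (see its README below) and the HBase-1.x-era Table.coprocessorService(byte[]) API; the table handle is assumed to be open and the endpoint assumed loaded on the region:

    import com.google.protobuf.ServiceException;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos;
    import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;

    public class RowCountClientSketch {
      // Ask the region holding 'row' to count its rows via the endpoint.
      static long countRows(Table table, byte[] row) throws ServiceException {
        CoprocessorRpcChannel channel = table.coprocessorService(row);
        ExampleProtos.RowCountService.BlockingInterface stub =
            ExampleProtos.RowCountService.newBlockingStub(channel);
        ExampleProtos.CountRequest request = ExampleProtos.CountRequest.getDefaultInstance();
        return stub.getRowCount(null, request).getCount(); // null RpcController
      }
    }

The stub class and blocking-interface shape come from the java_generic_services option these protos set; the same pattern applies to every endpoint proto listed in the pom below.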
diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml index 16fc2acd334..de05950de86 100644 --- a/hbase-endpoint/pom.xml +++ b/hbase-endpoint/pom.xml @@ -189,17 +189,44 @@ - org.xolstice.maven.plugins - protobuf-maven-plugin + org.apache.hadoop + hadoop-maven-plugins compile-protoc generate-sources - compile + protoc - ${basedir}/src/main/protobuf/,${basedir}/../hbase-protocol/src/main/protobuf + + + ${basedir}/src/main/protobuf + ${basedir}/../hbase-protocol/src/main/protobuf + + + + ${basedir}/../hbase-protocol/src/main/protobuf + ${basedir}/src/main/protobuf + + + + Aggregate.proto + BulkDelete.proto + DummyRegionServerEndpoint.proto + ColumnAggregationNullResponseProtocol.proto + ColumnAggregationProtocol.proto + ColumnAggregationWithErrorsProtocol.proto + IncrementCounterProcessor.proto + SecureBulkLoad.proto + + + + ${basedir}/src/main/java/ diff --git a/hbase-examples/README.txt b/hbase-examples/README.txt index 3252a8089b9..78051a630ca 100644 --- a/hbase-examples/README.txt +++ b/hbase-examples/README.txt @@ -65,3 +65,4 @@ Example code. Also includes example coprocessor endpoint examples. The protobuf files are at src/main/protobuf. See hbase-protocol README.txt for how to generate the example RowCountService Coprocessor Endpoint and Aggregator examples. + diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml index 9fcb6ce49c0..2238857f09b 100644 --- a/hbase-examples/pom.xml +++ b/hbase-examples/pom.xml @@ -179,15 +179,30 @@ - org.xolstice.maven.plugins - protobuf-maven-plugin + org.apache.hadoop + hadoop-maven-plugins compile-protoc generate-sources - compile + protoc + + + ${basedir}/src/main/protobuf + + + ${basedir}/src/main/protobuf + + + Examples.proto + + + + ${basedir}/src/main/java/ + diff --git a/hbase-protocol-shaded/README.txt b/hbase-protocol-shaded/README.txt index 3e886f48379..b009643528c 100644 --- a/hbase-protocol-shaded/README.txt +++ b/hbase-protocol-shaded/README.txt @@ -1,6 +1,4 @@ Please read carefully as the 'menu options' have changed. -What you do in here is not what you do elsewhere to generate -proto java files. This module has proto files used by core. These protos overlap with protos that are used by coprocessor endpoints @@ -22,9 +20,26 @@ Finally, this module also includes patches applied on top of protobuf to add functionality not yet in protobuf that we need now. +The shaded generated java files, including the patched protobuf +source files, are all checked in. + If you make changes to protos, to the protobuf version or to -the patches you want to apply to protobuf, you must rerun the -below step and then check in what it generated: +the patches you want to apply to protobuf, you must rerun this +step. + +First ensure that the appropriate protobuf protoc tool is in +your $PATH as in: + + $ export PATH=~/bin/protobuf-3.1.0/src:$PATH + +.. or pass -Dprotoc.path=PATH_TO_PROTOC when running +the below mvn commands. NOTE: The protoc that we use internally +is very likely NOT what is used over in the hbase-protocol +module (here we'd use a 3.1.0 whereas in hbase-protocol we'll +use something older, a 2.5.0). You may need to download protobuf and +build protoc first. + +Run: $ mvn install -Dcompile-protobuf @@ -32,20 +47,15 @@ or $ mvn install -Pcompile-protobuf +to build and trigger the special generate-shaded-classes profile. When finished, the content of src/main/java/org/apache/hadoop/hbase/shaded will have been updated. Make sure everything builds and then carefully check in the changes. Files may have been added or removed by the steps above.
-The protobuf version used internally by hbase differs from what -is used over in the CPEP hbase-protocol module but in here, the -mvn takes care of ensuring we have the right protobuf in place so -you don't have to. - If you have patches for the protobuf, add them to src/main/patches directory. They will be applied after protobuf is shaded and unbundled into src/main/java. See the pom.xml under the generate-shaded-classes profile -for more info on how this step works; it is a little involved -and a bit messy but all in the name of saving you pain. +for more info on how this step works. diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml index d703c817d00..01845ae2fd0 100644 --- a/hbase-protocol-shaded/pom.xml +++ b/hbase-protocol-shaded/pom.xml @@ -40,13 +40,6 @@ src/main/java - - - kr.motd.maven - os-maven-plugin - 1.4.0.Final - - ${sources.dir} @@ -223,20 +216,58 @@ - org.xolstice.maven.plugins - protobuf-maven-plugin + org.apache.hadoop + hadoop-maven-plugins - com.google.protobuf:protoc:${internal.protobuf.version}:exe:${os.detected.classifier} + ${internal.protobuf.version} compile-protoc generate-sources - compile + protoc - ${sources.dir} + + ${basedir}/src/main/protobuf + + + ${basedir}/src/main/protobuf + + + Admin.proto + Cell.proto + Client.proto + ClusterId.proto + ClusterStatus.proto + Comparator.proto + Encryption.proto + ErrorHandling.proto + FS.proto + Filter.proto + HBase.proto + HFile.proto + LoadBalancer.proto + MapReduce.proto + Master.proto + MasterProcedure.proto + Procedure.proto + Quota.proto + RPC.proto + RegionNormalizer.proto + RegionServerStatus.proto + Snapshot.proto + Tracing.proto + WAL.proto + ZooKeeper.proto + TestProcedure.proto + test.proto + test_rpc_service.proto + + + ${sources.dir} diff --git a/hbase-protocol-shaded/src/main/protobuf/CellSetMessage.proto b/hbase-protocol-shaded/src/main/protobuf/CellSetMessage.proto new file mode 100644 index 00000000000..4c404290455 --- /dev/null +++ b/hbase-protocol-shaded/src/main/protobuf/CellSetMessage.proto @@ -0,0 +1,28 @@ +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import "CellMessage.proto"; + +package org.apache.hadoop.hbase.shaded.rest.protobuf.generated; + +message CellSet { + message Row { + required bytes key = 1; + repeated Cell values = 2; + } + repeated Row rows = 1; +} diff --git a/hbase-protocol-shaded/src/main/protobuf/RowProcessor.proto b/hbase-protocol-shaded/src/main/protobuf/RowProcessor.proto new file mode 100644 index 00000000000..80fe6065f87 --- /dev/null +++ b/hbase-protocol-shaded/src/main/protobuf/RowProcessor.proto @@ -0,0 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * Defines a protocol to perform multi row transactions. + * See BaseRowProcessorEndpoint for the implementation. + * See HRegion#processRowsWithLocks() for details. + */ +package hbase.pb; + +option java_package = "org.apache.hadoop.hbase.shaded.protobuf.generated"; +option java_outer_classname = "RowProcessorProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +message ProcessRequest { + required string row_processor_class_name = 1; + optional string row_processor_initializer_message_name = 2; + optional bytes row_processor_initializer_message = 3; + optional uint64 nonce_group = 4; + optional uint64 nonce = 5; +} + +message ProcessResponse { + required bytes row_processor_result = 1; +} + +service RowProcessorService { + rpc Process(ProcessRequest) returns (ProcessResponse); +} diff --git a/hbase-protocol/README.txt b/hbase-protocol/README.txt index 4a77bf408dc..a3e11a26a5f 100644 --- a/hbase-protocol/README.txt +++ b/hbase-protocol/README.txt @@ -15,21 +15,23 @@ protobuf, it is obtainable from here: https://github.com/google/protobuf/releases/tag/v2.5.0 -To generate java files from protos run: +HBase uses hadoop-maven-plugins:protoc goal to invoke the protoc command. You can +compile the protoc definitions by invoking maven with profile compile-protobuf or +passing in compile-protobuf property. - $ mvn compile -Dcompile-protobuf +mvn compile -Dcompile-protobuf or - $ mvn compile -Pcompile-protobuf +mvn compile -Pcompile-protobuf -You may also want to define protocExecutable for the protoc binary +You may also want to define protoc.path for the protoc binary -mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc +mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc If you have added a new proto file, you should add it to the pom.xml file first. Other modules also support the maven profile. NOTE: The protoc used here is probably NOT the same as the hbase-protocol-shaded -module uses; here we use a more palatable version -- 2.5.0 -- whereas over in +module uses; here we use a more palatable version -- 2.5.0 -- whereas over in the internal hbase-protocol-shaded module, we'd use something newer. Be conscious of this when running your protoc, being sure to apply the appropriate version per module.
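The ProcessRequest/ProcessResponse pair above is what travels inside the coprocessor RPC. A minimal round-trip sketch against the generated RowProcessorProtos class this file produces; the processor class name and initializer bytes below are illustrative placeholders:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.RowProcessorProtos.ProcessRequest;

    public class ProcessRequestSketch {
      public static void main(String[] args) throws Exception {
        // Name the server-side RowProcessor implementation and pass it an
        // opaque initializer message; the nonces guard against replays.
        ProcessRequest request = ProcessRequest.newBuilder()
            .setRowProcessorClassName("org.example.MyRowProcessor") // illustrative
            .setRowProcessorInitializerMessage(ByteString.copyFromUtf8("init-args"))
            .setNonceGroup(1L)
            .setNonce(42L)
            .build();
        byte[] wire = request.toByteArray();      // the serialized RPC payload
        ProcessRequest reparsed = ProcessRequest.parseFrom(wire);
        System.out.println(reparsed.getRowProcessorClassName());
      }
    }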
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml index a3d99b2fe35..e21617bffcd 100644 --- a/hbase-protocol/pom.xml +++ b/hbase-protocol/pom.xml @@ -159,15 +159,60 @@ - org.xolstice.maven.plugins - protobuf-maven-plugin + org.apache.hadoop + hadoop-maven-plugins compile-protoc generate-sources - compile + protoc + + + ${basedir}/src/main/protobuf + + + ${basedir}/src/main/protobuf + + + AccessControl.proto + Admin.proto + Authentication.proto + Cell.proto + Client.proto + ClusterId.proto + ClusterStatus.proto + Comparator.proto + Encryption.proto + ErrorHandling.proto + FS.proto + Filter.proto + HBase.proto + HFile.proto + LoadBalancer.proto + MapReduce.proto + MultiRowMutation.proto + Quota.proto + RPC.proto + RowProcessor.proto + Snapshot.proto + + test.proto + test_rpc_service.proto + Tracing.proto + VisibilityLabels.proto + WAL.proto + ZooKeeper.proto + PingProtocol.proto + + + + ${basedir}/src/main/java/ + diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java deleted file mode 100644 index f06555045f6..00000000000 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java +++ /dev/null @@ -1,530 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: TestProcedure.proto - -package org.apache.hadoop.hbase.ipc.protobuf.generated; - -public final class TestProcedureProtos { - private TestProcedureProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface TestTableDDLStateDataOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string table_name = 1; - /** - * required string table_name = 1; - */ - boolean hasTableName(); - /** - * required string table_name = 1; - */ - java.lang.String getTableName(); - /** - * required string table_name = 1; - */ - com.google.protobuf.ByteString - getTableNameBytes(); - } - /** - * Protobuf type {@code TestTableDDLStateData} - */ - public static final class TestTableDDLStateData extends - com.google.protobuf.GeneratedMessage - implements TestTableDDLStateDataOrBuilder { - // Use TestTableDDLStateData.newBuilder() to construct. 
- private TestTableDDLStateData(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private TestTableDDLStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TestTableDDLStateData defaultInstance; - public static TestTableDDLStateData getDefaultInstance() { - return defaultInstance; - } - - public TestTableDDLStateData getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private TestTableDDLStateData( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TestTableDDLStateData parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TestTableDDLStateData(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required string table_name = 1; - public static final int TABLE_NAME_FIELD_NUMBER = 1; - private java.lang.Object tableName_; - /** - * required string table_name = 1; - */ - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required string table_name = 1; - */ - public java.lang.String getTableName() { - java.lang.Object ref = tableName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - 
(com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - tableName_ = s; - } - return s; - } - } - /** - * required string table_name = 1; - */ - public com.google.protobuf.ByteString - getTableNameBytes() { - java.lang.Object ref = tableName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - tableName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - tableName_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getTableNameBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getTableNameBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData 
parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code TestTableDDLStateData} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateDataOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - tableName_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.internal_static_TestTableDDLStateData_descriptor; - } - - public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData 
getDefaultInstanceForType() { - return org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData build() { - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData buildPartial() { - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.tableName_ = tableName_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) { - return mergeFrom((org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData other) { - if (other == org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData.getDefaultInstance()) return this; - if (other.hasTableName()) { - bitField0_ |= 0x00000001; - tableName_ = other.tableName_; - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasTableName()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required string table_name = 1; - private java.lang.Object tableName_ = ""; - /** - * required string table_name = 1; - */ - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required string table_name = 1; - */ - public java.lang.String getTableName() { - java.lang.Object ref = tableName_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - tableName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * required string table_name = 1; - */ - public com.google.protobuf.ByteString - getTableNameBytes() { - java.lang.Object ref = tableName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - tableName_ = b; - return b; - } 
else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * required string table_name = 1; - */ - public Builder setTableName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - tableName_ = value; - onChanged(); - return this; - } - /** - * required string table_name = 1; - */ - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - /** - * required string table_name = 1; - */ - public Builder setTableNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - tableName_ = value; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:TestTableDDLStateData) - } - - static { - defaultInstance = new TestTableDDLStateData(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:TestTableDDLStateData) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_TestTableDDLStateData_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_TestTableDDLStateData_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\023TestProcedure.proto\"+\n\025TestTableDDLSta" + - "teData\022\022\n\ntable_name\030\001 \002(\tBH\n.org.apache" + - ".hadoop.hbase.ipc.protobuf.generatedB\023Te" + - "stProcedureProtos\210\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_TestTableDDLStateData_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_TestTableDDLStateData_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_TestTableDDLStateData_descriptor, - new java.lang.String[] { "TableName", }); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/hbase-rest/README.txt b/hbase-rest/README.txt deleted file mode 100644 index 402f7dfe872..00000000000 --- a/hbase-rest/README.txt +++ /dev/null @@ -1,26 +0,0 @@ -This maven module has the protobuf definition files used by REST. - -The produced java classes are generated and then checked in. The reasoning is -that they change infrequently. - -To regenerate the classes after making definition file changes, in here or over -in hbase-protocol since we source some of those protos in this package, ensure -first that the protobuf protoc tool is in your $PATH. You may need to download -it and build it first; it is part of the protobuf package. For example, if using -v2.5.0 of protobuf, it is obtainable from here: - - https://github.com/google/protobuf/releases/tag/v2.5.0 - -You can compile the protoc definitions by invoking maven with profile compile-protobuf -or passing in compile-protobuf property. 
- - $ mvn compile -Dcompile-protobuf -or - $ mvn compile -Pcompile-protobuf - -You may also want to define protocExecutable for the protoc binary - - $ mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc - -After you've done the above, check it in and then check it in (or post a patch -on a JIRA with your definition file changes and the generated files). diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml index 53fbad452be..8a270d7d507 100644 --- a/hbase-rest/pom.xml +++ b/hbase-rest/pom.xml @@ -353,15 +353,40 @@ - org.xolstice.maven.plugins - protobuf-maven-plugin + org.apache.hadoop + hadoop-maven-plugins compile-protoc generate-sources - compile + protoc + + + ${basedir}/src/main/protobuf + + + ${basedir}/src/main/protobuf + + + CellMessage.proto + CellSetMessage.proto + ColumnSchemaMessage.proto + NamespacePropertiesMessage.proto + NamespacesMessage.proto + ScannerMessage.proto + StorageClusterStatusMessage.proto + TableInfoMessage.proto + TableListMessage.proto + TableSchemaMessage.proto + VersionMessage.proto + + + + ${basedir}/src/main/java/ + diff --git a/hbase-rsgroup/README.txt b/hbase-rsgroup/README.txt index 0491a2ae916..1e247a94527 100644 --- a/hbase-rsgroup/README.txt +++ b/hbase-rsgroup/README.txt @@ -1,26 +1,30 @@ -This maven module has the protobuf definition files used by regionserver grouping. +These are the protobuf definition files used by the region grouping feature. +The protos here are used by the region grouping coprocessor endpoints. -The produced java classes are generated and then checked in. The reasoning is -that they change infrequently. +The produced java classes are generated and then checked in. The reasoning +is that they change infrequently and it saves generating anew on each build. -To regenerate the classes after making definition file changes, in here or over -in hbase-protocol since we source some of those protos in this package, ensure -first that the protobuf protoc tool is in your $PATH. You may need to download -it and build it first; it is part of the protobuf package. For example, if using -v2.5.0 of protobuf, it is obtainable from here: +To regenerate the classes after making definition file changes, ensure first that +the protobuf protoc tool is in your $PATH. You may need to download it and build +it first; it's part of the protobuf package. For example, if using v2.5.0 of +protobuf, it is obtainable from here: https://github.com/google/protobuf/releases/tag/v2.5.0 -You can compile the protoc definitions by invoking maven with profile compile-protobuf -or passing in compile-protobuf property. +HBase uses hadoop-maven-plugins:protoc goal to invoke the protoc command. You can +compile the protoc definitions by invoking maven with profile compile-protobuf or +passing in compile-protobuf property. - $ mvn compile -Dcompile-protobuf +mvn compile -Dcompile-protobuf or - $ mvn compile -Pcompile-protobuf +mvn compile -Pcompile-protobuf -You may also want to define protocExecutable for the protoc binary +You may also want to define protoc.path for the protoc binary - $ mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc +mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc + +If you have added a new proto file, you should add it to the pom.xml file first. +Other modules also support the maven profile. After you've done the above, check it in (or post a patch on a JIRA with your definition file changes and the generated files).
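One of the REST protos listed in the hbase-rest pom above, CellSetMessage.proto, is added earlier in this patch; it nests a Row message inside CellSet. A construction sketch, assuming protoc's default naming (outer class CellSetMessage from the file name, Java package taken from the proto package); the repeated Cell values come from CellMessage.proto, which is not reproduced in this patch, so the row is left empty:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.shaded.rest.protobuf.generated.CellSetMessage.CellSet;

    public class CellSetSketch {
      public static void main(String[] args) throws Exception {
        // A single row keyed "row1"; its 'values' (repeated Cell) stay empty
        // because the Cell message is defined over in CellMessage.proto.
        CellSet cellSet = CellSet.newBuilder()
            .addRows(CellSet.Row.newBuilder()
                .setKey(ByteString.copyFromUtf8("row1")))
            .build();
        CellSet reparsed = CellSet.parseFrom(cellSet.toByteArray());
        System.out.println(reparsed.getRows(0).getKey().toStringUtf8()); // row1
      }
    }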
diff --git a/hbase-rsgroup/pom.xml b/hbase-rsgroup/pom.xml index 7534a65c769..58802cf7ac9 100644 --- a/hbase-rsgroup/pom.xml +++ b/hbase-rsgroup/pom.xml @@ -226,34 +226,6 @@ true - - compile-protobuf - - - compile-protobuf - - - - - - org.xolstice.maven.plugins - protobuf-maven-plugin - - - compile-protoc - generate-sources - - compile - - - ${basedir}/src/main/protobuf/,${basedir}/../hbase-protocol/src/main/protobuf - - - - - - - diff --git a/hbase-spark/README.txt b/hbase-spark/README.txt index a60a964bb33..5569924fdcd 100644 --- a/hbase-spark/README.txt +++ b/hbase-spark/README.txt @@ -11,16 +11,25 @@ v2.5.0 of protobuf, it is obtainable from here: https://github.com/google/protobuf/releases/tag/v2.5.0 -You can compile the protoc definitions by invoking maven with profile compile-protobuf -or passing in compile-protobuf property. +HBase uses hadoop-maven-plugins:protoc goal to invoke the protoc command. You can +compile the protoc definitions by invoking maven with profile compile-protobuf or +passing in compile-protobuf property. - $ mvn compile -Dcompile-protobuf +mvn compile -Dcompile-protobuf or - $ mvn compile -Pcompile-protobuf +mvn compile -Pcompile-protobuf -You may also want to define protocExecutable for the protoc binary +You may also want to define protoc.path for the protoc binary - $ mvn compile -Dcompile-protobuf -DprotocExecutable=/opt/local/bin/protoc +mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc + +If you have added a new proto file, you should add it to the pom.xml file first. +Other modules also support the maven profile. After you've done the above, check it in (or post a patch on a JIRA with your definition file changes and the generated files). + +NOTE: The maven protoc plugin is a little broken. It will only source one dir +at a time. If the protobuf files change, you will have to first run protoc with +the src directory pointing back into the hbase-protocol module and then rerun it +after editing the pom to point in here to source .proto files. diff --git a/hbase-spark/pom.xml b/hbase-spark/pom.xml index 0a96f9c084a..a60b384607d 100644 --- a/hbase-spark/pom.xml +++ b/hbase-spark/pom.xml @@ -716,15 +716,30 @@ - org.xolstice.maven.plugins - protobuf-maven-plugin + org.apache.hadoop + hadoop-maven-plugins compile-protoc generate-sources - compile + protoc + + + ${basedir}/src/main/protobuf + + + ${basedir}/src/main/protobuf + + + SparkFilter.proto + + + + ${basedir}/src/main/java/ + diff --git a/pom.xml b/pom.xml index 38c8978a3d7..ad77f058ac5 100644 --- a/pom.xml +++ b/pom.xml @@ -811,13 +811,12 @@ - org.xolstice.maven.plugins - protobuf-maven-plugin - ${protobuf.plugin.version} + org.apache.hadoop + hadoop-maven-plugins + ${hadoop-two.version} - ${basedir}/src/main/protobuf/ - ${basedir}/src/main/java/ - false + ${protobuf.version} + ${protoc.path} @@ -1216,7 +1215,6 @@ 1.2.17 1.10.8 2.5.0 - 0.5.0 thrift 0.9.3 3.4.8
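For reference, the TestProcedureProtos.java deleted above shows the API shape every checked-in generated class in these modules shares: a nested message type, a Builder, required-field validation, and static parse methods. A minimal round-trip sketch against that API, assuming the regenerated class is on the classpath:

    import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData;

    public class TestProcedureRoundTrip {
      public static void main(String[] args) throws Exception {
        // table_name is 'required': build() would throw an
        // UninitializedMessageException if setTableName() were skipped.
        TestTableDDLStateData data = TestTableDDLStateData.newBuilder()
            .setTableName("TestTable")
            .build();
        TestTableDDLStateData reparsed = TestTableDDLStateData.parseFrom(data.toByteArray());
        System.out.println(reparsed.hasTableName() + " " + reparsed.getTableName());
      }
    }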