HBASE-10020 Add maven compile-protobuf profile

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1546237 13f79535-47bb-0310-9956-ffa450edef68
Enis Soztutar 2013-11-27 23:57:23 +00:00
parent 12f0dce3ae
commit 770e336ef6
17 changed files with 1150 additions and 668 deletions
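
In short: the per-file protoc shell script shown below is removed, and each module
that carries .proto files gains a compile-protobuf Maven profile instead. A minimal
sketch of the new invocation, assuming protoc is already on your PATH or reachable
through the protoc.path property:

  mvn compile -Pcompile-protobuf
  # or activate the profile via its property, pointing at a specific protoc binary:
  mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc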

View File

@@ -1,36 +0,0 @@
#!/bin/bash
##
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# script to run protoc to generate protocol buf files.
# usage: ./build-proto.sh
#
which protoc
if [ $? != 0 ] ; then
echo "Must have protoc compiler in your path to generate code"
exit 1
fi
HBASE_DIR=`dirname $0`/..
PROTO_DIR=$HBASE_DIR/hbase-protocol/src/main/protobuf
JAVA_DIR=$HBASE_DIR/hbase-protocol/src/main/java
set -x
for f in $PROTO_DIR/*.proto ; do
protoc -I$PROTO_DIR --java_out=$JAVA_DIR $f
done

View File

@@ -245,5 +245,44 @@ if we can combine these profiles somehow -->
</plugins>
</build>
</profile>
<profile>
<id>compile-protobuf</id>
<activation>
<property>
<name>compile-protobuf</name>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
<id>compile-protoc</id>
<phase>generate-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<imports>
<param>${basedir}/src/main/protobuf</param>
<param>${basedir}/../hbase-protocol/src/main/protobuf</param>
</imports>
<source>
<directory>${basedir}/src/main/protobuf</directory>
<includes>
<include>BulkDelete.proto</include>
<include>Examples.proto</include>
</includes>
</source>
<output>${basedir}/src/main/java/</output>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
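
Judging by the BulkDelete.proto/Examples.proto sources and the import of the
hbase-protocol definitions, this profile belongs to the hbase-examples module
(an inference, not stated in the hunk). Assuming that layout, the profile can be
exercised for just this module:

  cd hbase-examples                # assumed module directory
  mvn compile -Dcompile-protobuf   # rewrites the generated classes under src/main/java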

View File

@@ -11,14 +11,18 @@ public final class ExampleProtos {
public interface CountRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code CountRequest}
*/
public static final class CountRequest extends
com.google.protobuf.GeneratedMessage
implements CountRequestOrBuilder {
// Use CountRequest.newBuilder() to construct.
private CountRequest(Builder builder) {
private CountRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private CountRequest(boolean noInit) {}
private CountRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final CountRequest defaultInstance;
public static CountRequest getDefaultInstance() {
@@ -29,6 +33,46 @@ public final class ExampleProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private CountRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_descriptor;
@@ -36,7 +80,24 @@ public final class ExampleProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_fieldAccessorTable;
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class);
}
public static com.google.protobuf.Parser<CountRequest> PARSER =
new com.google.protobuf.AbstractParser<CountRequest>() {
public CountRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CountRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<CountRequest> getParserForType() {
return PARSER;
}
private void initFields() {
@@ -90,79 +151,70 @@ public final class ExampleProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@@ -178,6 +230,9 @@ public final class ExampleProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code CountRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequestOrBuilder {
@@ -188,7 +243,9 @@ public final class ExampleProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_fieldAccessorTable;
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.newBuilder()
@@ -196,7 +253,8 @@ public final class ExampleProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -219,7 +277,7 @@ public final class ExampleProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDescriptor();
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_descriptor;
}
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest getDefaultInstanceForType() {
@@ -234,16 +292,6 @@ public final class ExampleProtos {
return result;
}
private org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest buildPartial() {
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest(this);
onBuilt();
@@ -273,29 +321,19 @@ public final class ExampleProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
}
}
}
// @@protoc_insertion_point(builder_scope:CountRequest)
}
@@ -312,17 +350,27 @@ public final class ExampleProtos {
extends com.google.protobuf.MessageOrBuilder {
// required int64 count = 1 [default = 0];
/**
* <code>required int64 count = 1 [default = 0];</code>
*/
boolean hasCount();
/**
* <code>required int64 count = 1 [default = 0];</code>
*/
long getCount();
}
/**
* Protobuf type {@code CountResponse}
*/
public static final class CountResponse extends
com.google.protobuf.GeneratedMessage
implements CountResponseOrBuilder {
// Use CountResponse.newBuilder() to construct.
private CountResponse(Builder builder) {
private CountResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private CountResponse(boolean noInit) {}
private CountResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final CountResponse defaultInstance;
public static CountResponse getDefaultInstance() {
@@ -333,6 +381,52 @@ public final class ExampleProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private CountResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
count_ = input.readInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_descriptor;
@@ -340,16 +434,39 @@ public final class ExampleProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class);
}
public static com.google.protobuf.Parser<CountResponse> PARSER =
new com.google.protobuf.AbstractParser<CountResponse>() {
public CountResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CountResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<CountResponse> getParserForType() {
return PARSER;
}
private int bitField0_;
// required int64 count = 1 [default = 0];
public static final int COUNT_FIELD_NUMBER = 1;
private long count_;
/**
* <code>required int64 count = 1 [default = 0];</code>
*/
public boolean hasCount() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int64 count = 1 [default = 0];</code>
*/
public long getCount() {
return count_;
}
@@ -422,8 +539,12 @@ public final class ExampleProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasCount()) {
@@ -431,74 +552,61 @@ public final class ExampleProtos {
hash = (53 * hash) + hashLong(getCount());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@@ -514,6 +622,9 @@ public final class ExampleProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code CountResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponseOrBuilder {
@@ -524,7 +635,9 @@ public final class ExampleProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.newBuilder()
@@ -532,7 +645,8 @@ public final class ExampleProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -557,7 +671,7 @@ public final class ExampleProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDescriptor();
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_descriptor;
}
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getDefaultInstanceForType() {
@@ -572,16 +686,6 @@ public final class ExampleProtos {
return result;
}
private org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse buildPartial() {
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse result = new org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse(this);
int from_bitField0_ = bitField0_;
@@ -625,50 +729,47 @@ public final class ExampleProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
count_ = input.readInt64();
break;
}
}
}
}
private int bitField0_;
// required int64 count = 1 [default = 0];
private long count_ ;
/**
* <code>required int64 count = 1 [default = 0];</code>
*/
public boolean hasCount() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int64 count = 1 [default = 0];</code>
*/
public long getCount() {
return count_;
}
/**
* <code>required int64 count = 1 [default = 0];</code>
*/
public Builder setCount(long value) {
bitField0_ |= 0x00000001;
count_ = value;
onChanged();
return this;
}
/**
* <code>required int64 count = 1 [default = 0];</code>
*/
public Builder clearCount() {
bitField0_ = (bitField0_ & ~0x00000001);
count_ = 0L;
@@ -687,16 +788,25 @@ public final class ExampleProtos {
// @@protoc_insertion_point(class_scope:CountResponse)
}
/**
* Protobuf service {@code RowCountService}
*/
public static abstract class RowCountService
implements com.google.protobuf.Service {
protected RowCountService() {}
public interface Interface {
/**
* <code>rpc getRowCount(.CountRequest) returns (.CountResponse);</code>
*/
public abstract void getRowCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done);
/**
* <code>rpc getKeyValueCount(.CountRequest) returns (.CountResponse);</code>
*/
public abstract void getKeyValueCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
@@ -793,11 +903,17 @@ public final class ExampleProtos {
};
}
/**
* <code>rpc getRowCount(.CountRequest) returns (.CountResponse);</code>
*/
public abstract void getRowCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done);
/**
* <code>rpc getKeyValueCount(.CountRequest) returns (.CountResponse);</code>
*/
public abstract void getKeyValueCount(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
@@ -971,6 +1087,8 @@ public final class ExampleProtos {
}
}
// @@protoc_insertion_point(class_scope:RowCountService)
}
private static com.google.protobuf.Descriptors.Descriptor
@@ -1010,17 +1128,13 @@ public final class ExampleProtos {
internal_static_CountRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_CountRequest_descriptor,
new java.lang.String[] { },
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class);
new java.lang.String[] { });
internal_static_CountResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_CountResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_CountResponse_descriptor,
new java.lang.String[] { "Count", },
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class,
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class);
new java.lang.String[] { "Count", });
return null;
}
};

View File

@@ -7,28 +7,21 @@ the protobuf protoc tool is in your $PATH (You may need to download it and build
it first; it's part of the protobuf package obtainable from here:
http://code.google.com/p/protobuf/downloads/list).
Then run the following (You should be able to just copy and paste the below into a
terminal and hit return -- the protoc compiler runs fast):
HBase uses the hadoop-maven-plugins:protoc goal to invoke the protoc command. You
can compile the protobuf definitions by invoking maven with the compile-protobuf
profile, or by passing in the compile-protobuf property.
UNIX_PROTO_DIR=src/main/protobuf
JAVA_DIR=src/main/java/
mkdir -p $JAVA_DIR 2> /dev/null
if which cygpath 2> /dev/null; then
PROTO_DIR=`cygpath --windows $UNIX_PROTO_DIR`
JAVA_DIR=`cygpath --windows $JAVA_DIR`
else
PROTO_DIR=$UNIX_PROTO_DIR
fi
# uncomment the next line if you want to remove before generating
# rm -fr $JAVA_DIR/org/apache/hadoop/hbase/protobuf/generated
for PROTO_FILE in $UNIX_PROTO_DIR/*.proto
do
protoc -I$PROTO_DIR --java_out=$JAVA_DIR $PROTO_FILE
done
mvn compile -Dcompile-protobuf
or
mvn compile -Pcompile-protobuf
You may also want to define protoc.path to point at the protoc binary:
mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
If you have added a new proto file, you should add it to the pom.xml file first.
Other modules also support the maven profile.
After you've done the above, check the generated files in (or post a patch
on a JIRA with your definition file changes and the generated files).
Optionally, you can uncomment the hadoop-maven-plugins plugin in hbase-protocol/pom.xml.
This plugin will generate the classes during the build. Once again, you will need protocol buffers
to be installed on your build machine (https://developers.google.com/protocol-buffers).
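
A sketch of that workflow end to end, assuming an svn working copy of the module
(the JIRA id below is a placeholder):

  mvn compile -Pcompile-protobuf
  svn status src/main/java        # review the regenerated classes
  svn diff > HBASE-NNNN.patch     # or check the changes in directly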

View File

@@ -69,63 +69,6 @@
</execution>
</executions>
</plugin>
<!--
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<version>2.0.5-alpha</version>
<executions>
<execution>
<id>compile-protoc</id>
<phase>generate-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<protocVersion>${protobuf.version}</protocVersion>
<protocCommand>${protoc.path}</protocCommand>
<imports>
<param>${basedir}/src/main/protobuf</param>
</imports>
<source>
<directory>${basedir}/src/main/protobuf</directory>
<includes>
<include>AccessControl.proto</include>
<include>Admin.proto</include>
<include>Aggregate.proto</include>
<include>Authentication.proto</include>
<include>Cell.proto</include>
<include>Client.proto</include>
<include>ClusterId.proto</include>
<include>ClusterStatus.proto</include>
<include>Comparator.proto</include>
<include>ErrorHandling.proto</include>
<include>Filter.proto</include>
<include>FS.proto</include>
<include>HBase.proto</include>
<include>HFile.proto</include>
<include>LoadBalancer.proto</include>
<include>MapReduce.proto</include>
<include>MasterAdmin.proto</include>
<include>MasterMonitor.proto</include>
<include>Master.proto</include>
<include>MultiRowMutationProcessorMessages.proto</include>
<include>MultiRowMutation.proto</include>
<include>RegionServerStatus.proto</include>
<include>RowProcessor.proto</include>
<include>RPC.proto</include>
<include>SecureBulkLoad.proto</include>
<include>Tracing.proto</include>
<include>WAL.proto</include>
<include>ZooKeeper.proto</include>
</includes>
</source>
<output>${project.build.directory}/generated-sources/java</output>
</configuration>
</execution>
</executions>
</plugin>
-->
</plugins>
<pluginManagement>
<plugins>
@@ -180,6 +123,67 @@
<surefire.skipFirstPart>true</surefire.skipFirstPart>
</properties>
</profile>
<profile>
<id>compile-protobuf</id>
<activation>
<property>
<name>compile-protobuf</name>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
<id>compile-protoc</id>
<phase>generate-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<imports>
<param>${basedir}/src/main/protobuf</param>
</imports>
<source>
<directory>${basedir}/src/main/protobuf</directory>
<includes>
<include>AccessControl.proto</include>
<include>Admin.proto</include>
<include>Aggregate.proto</include>
<include>Authentication.proto</include>
<include>Cell.proto</include>
<include>Client.proto</include>
<include>ClusterId.proto</include>
<include>ClusterStatus.proto</include>
<include>Comparator.proto</include>
<include>ErrorHandling.proto</include>
<include>Filter.proto</include>
<include>FS.proto</include>
<include>HBase.proto</include>
<include>HFile.proto</include>
<include>LoadBalancer.proto</include>
<include>MapReduce.proto</include>
<include>Master.proto</include>
<include>MultiRowMutation.proto</include>
<include>RegionServerStatus.proto</include>
<include>RowProcessor.proto</include>
<include>RPC.proto</include>
<include>SecureBulkLoad.proto</include>
<include>Tracing.proto</include>
<include>WAL.proto</include>
<include>ZooKeeper.proto</include>
</includes>
</source>
<!--<output>${project.build.directory}/generated-sources/java</output>-->
<output>${basedir}/src/main/java/</output>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>

View File

@@ -685,5 +685,75 @@
</plugins>
</build>
</profile>
<profile>
<id>compile-protobuf</id>
<activation>
<property>
<name>compile-protobuf</name>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
<id>compile-protoc</id>
<phase>generate-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<imports>
<param>${basedir}/src/main/resources/org/apache/hadoop/hbase/rest/protobuf</param>
</imports>
<source>
<!-- These should be under src/main/protobuf -->
<directory>${basedir}/src/main/resources/org/apache/hadoop/hbase/rest/protobuf</directory>
<includes>
<include>CellMessage.proto</include>
<include>CellSetMessage.proto</include>
<include>ColumnSchemaMessage.proto</include>
<include>ScannerMessage.proto</include>
<include>StorageClusterStatusMessage.proto</include>
<include>TableInfoMessage.proto</include>
<include>TableListMessage.proto</include>
<include>TableSchemaMessage.proto</include>
<include>VersionMessage.proto</include>
</includes>
</source>
<output>${basedir}/src/main/java/</output>
</configuration>
</execution>
<execution>
<id>compile-test-protoc</id>
<phase>generate-test-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<imports>
<param>${basedir}/src/test/protobuf</param>
</imports>
<source>
<directory>${basedir}/src/test/protobuf</directory>
<includes>
<include>ColumnAggregationProtocol.proto</include>
<include>IncrementCounterProcessor.proto</include>
<include>PingProtocol.proto</include>
<include>test.proto</include>
<include>test_delayed_rpc.proto</include>
<include>test_rpc_service.proto</include>
</includes>
</source>
<output>${basedir}/src/test/java/</output>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
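
One lifecycle detail worth noting (an inference from the phase bindings above, not
something the commit states): compile-test-protoc is bound to generate-test-sources,
a phase that a plain "mvn compile" never reaches, so regenerating the test protos
takes at least:

  mvn test-compile -Dcompile-protobuf   # runs both the main and the test protoc executions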

View File

@@ -690,6 +690,15 @@
<attach>false</attach>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<version>${hadoop-two.version}</version>
<configuration>
<protocVersion>${protobuf.version}</protocVersion>
<protocCommand>${protoc.path}</protocCommand>
</configuration>
</plugin>
</plugins>
</pluginManagement>
<plugins>
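
Since protocVersion and protocCommand are configured once in this pluginManagement
block, every module's compile-protobuf profile inherits them. A sketch of lining the
two up at build time (the path is an example; as far as I can tell, the hadoop plugin
checks the binary's reported version against ${protobuf.version}):

  protoc --version   # should report the version the pom expects
  mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc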

View File

@@ -165,11 +165,21 @@ mvn clean package -DskipTests
</para>
</section>
<section xml:id="build.protobuf"><title>Build Protobuf</title>
<para>You may need to change the protobuf definitions that reside in the hbase-protocol module.</para>
<para>You may need to change the protobuf definitions that reside in the hbase-protocol module or other modules.</para>
<para>
The protobuf files are located in <link xlink:href="https://svn.apache.org/repos/asf/hbase/trunk/hbase-protocol/src/main/protobuf">hbase-protocol/src/main/protobuf</link>.
For the change to be effective, you will need to
regenerate the classes (read the <link xlink:href="https://svn.apache.org/repos/asf/hbase/trunk/hbase-protocol/README.txt">hbase-protocol/README.txt</link> for more details).
For the change to be effective, you will need to regenerate the classes. You can use the maven profile compile-protobuf to do this:
<programlisting>
mvn compile -Dcompile-protobuf
or
mvn compile -Pcompile-protobuf
</programlisting>
You may also want to define protoc.path to point at the protoc binary:
<programlisting>
mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc
</programlisting>
Read the <link xlink:href="https://svn.apache.org/repos/asf/hbase/trunk/hbase-protocol/README.txt">hbase-protocol/README.txt</link> for more details.
</para>
</section>