HBASE-16263 Move all to do w/ protobuf -- *.proto files and generated classes -- under hbase-protocol

Signed-off-by: stack <stack@apache.org>
stack committed 2016-07-20 16:25:40 -07:00
commit 9d740f7b8b (parent ce657d5911)
54 changed files with 199 additions and 334 deletions


@ -300,44 +300,5 @@ if we can combine these profiles somehow -->
</plugins>
</build>
</profile>
<profile>
<id>compile-protobuf</id>
<activation>
<property>
<name>compile-protobuf</name>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
<id>compile-protoc</id>
<phase>generate-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<imports>
<param>${basedir}/src/main/protobuf</param>
<param>${basedir}/../hbase-protocol/src/main/protobuf</param>
</imports>
<source>
<directory>${basedir}/src/main/protobuf</directory>
<includes>
<include>BulkDelete.proto</include>
<include>Examples.proto</include>
</includes>
</source>
<output>${basedir}/src/main/java/</output>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>


@ -1,11 +1,18 @@
These are the protobuf definition files used by hbase. The produced java
classes are generated into src/main/java/org/apache/hadoop/hbase/protobuf/generated
These are the protobuf definition files used by hbase. ALL protobuf proto files
must live in this module whether test or spark or coprocessor endpoint protos
because we are being careful about what we expose of protobuf to downstreamers;
we are shading our version of protobuf so we can freely change it as needed.
The produced java classes are generated into
src/main/java/org/apache/hadoop/hbase/protobuf/generated
and then checked in. The reasoning is that they change infrequently.
To regenerate the classes after making definition file changes, ensure first that
the protobuf protoc tool is in your $PATH (You may need to download it and build
it first; its part of the protobuf package obtainable from here:
https://github.com/google/protobuf/releases/tag/v2.5.0).
the protobuf protoc tool is in your $PATH. You may need to download it and build
it first; it's part of the protobuf package. For example, if using v2.5.0 of
protobuf, it is obtainable from here:
https://github.com/google/protobuf/releases/tag/v2.5.0
HBase uses hadoop-maven-plugins:protoc goal to invoke the protoc command. You can
compile the protoc definitions by invoking maven with profile compile-protobuf or
@ -24,4 +31,3 @@ Other modules also support the maven profile.
After you've done the above, check it in and then check it in (or post a patch
on a JIRA with your definition file changes and the generated files).
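As a rough sketch of the regeneration step the README describes (assuming the compile-protobuf profile and property shown in the pom excerpts in this commit, and a protoc binary already on your PATH), the maven invocation from the module whose .proto files changed looks something like:

    # activate the profile through the compile-protobuf property...
    mvn compile -Dcompile-protobuf
    # ...or through the profile id; either way the hadoop-maven-plugins protoc goal runs
    mvn compile -Pcompile-protobuf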


@ -203,6 +203,27 @@
<include>VisibilityLabels.proto</include>
<include>WAL.proto</include>
<include>ZooKeeper.proto</include>
<include>SparkFilter.proto</include>
<include>ColumnAggregationNullResponseProtocol.proto</include>
<include>ColumnAggregationProtocol.proto</include>
<include>ColumnAggregationWithErrorsProtocol.proto</include>
<include>DummyRegionServerEndpoint.proto</include>
<include>IncrementCounterProcessor.proto</include>
<include>PingProtocol.proto</include>
<include>TestProcedure.proto</include>
<include>test.proto</include>
<include>test_rpc_service.proto</include>
<include>CellMessage.proto</include>
<include>CellSetMessage.proto</include>
<include>ColumnSchemaMessage.proto</include>
<include>NamespacePropertiesMessage.proto</include>
<include>NamespacesMessage.proto</include>
<include>ScannerMessage.proto</include>
<include>StorageClusterStatusMessage.proto</include>
<include>TableInfoMessage.proto</include>
<include>TableListMessage.proto</include>
<include>TableSchemaMessage.proto</include>
<include>VersionMessage.proto</include>
</includes>
</source>
<!--<output>${project.build.directory}/generated-sources/java</output>-->


@ -1,10 +1,10 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: Filter.proto
// source: SparkFilter.proto
package org.apache.hadoop.hbase.spark.protobuf.generated;
public final class FilterProtos {
private FilterProtos() {}
public final class SparkFilterProtos {
private SparkFilterProtos() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
@ -126,14 +126,14 @@ public final class FilterProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_descriptor;
return org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_fieldAccessorTable
return org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.class, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder.class);
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.class, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder.class);
}
public static com.google.protobuf.Parser<SQLPredicatePushDownCellToColumnMapping> PARSER =
@ -303,10 +303,10 @@ public final class FilterProtos {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping)) {
if (!(obj instanceof org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping other = (org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping) obj;
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping other = (org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping) obj;
boolean result = true;
result = result && (hasColumnFamily() == other.hasColumnFamily());
@ -354,53 +354,53 @@ public final class FilterProtos {
return hash;
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(byte[] data)
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(java.io.InputStream input)
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseDelimitedFrom(java.io.InputStream input)
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseDelimitedFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
@ -409,7 +409,7 @@ public final class FilterProtos {
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping prototype) {
public static Builder newBuilder(org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@ -425,20 +425,20 @@ public final class FilterProtos {
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder {
implements org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_descriptor;
return org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_fieldAccessorTable
return org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.class, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder.class);
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.class, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder.class);
}
// Construct using org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.newBuilder()
// Construct using org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
@ -473,23 +473,23 @@ public final class FilterProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_descriptor;
return org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.internal_static_hbase_pb_SQLPredicatePushDownCellToColumnMapping_descriptor;
}
public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping getDefaultInstanceForType() {
return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.getDefaultInstance();
public org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping getDefaultInstanceForType() {
return org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.getDefaultInstance();
}
public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping build() {
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping result = buildPartial();
public org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping build() {
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping buildPartial() {
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping result = new org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping(this);
public org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping buildPartial() {
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping result = new org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@ -510,16 +510,16 @@ public final class FilterProtos {
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping) {
return mergeFrom((org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping)other);
if (other instanceof org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping) {
return mergeFrom((org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping other) {
if (other == org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.getDefaultInstance()) return this;
public Builder mergeFrom(org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping other) {
if (other == org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.getDefaultInstance()) return this;
if (other.hasColumnFamily()) {
setColumnFamily(other.getColumnFamily());
}
@ -555,11 +555,11 @@ public final class FilterProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping parsedMessage = null;
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping) e.getUnfinishedMessage();
parsedMessage = (org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
@ -763,12 +763,12 @@ public final class FilterProtos {
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping>
java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping>
getCellToColumnMappingList();
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping getCellToColumnMapping(int index);
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping getCellToColumnMapping(int index);
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
@ -776,12 +776,12 @@ public final class FilterProtos {
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>
java.util.List<? extends org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>
getCellToColumnMappingOrBuilderList();
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder getCellToColumnMappingOrBuilder(
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder getCellToColumnMappingOrBuilder(
int index);
// optional string encoderClassName = 4;
@ -865,10 +865,10 @@ public final class FilterProtos {
}
case 26: {
if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
cellToColumnMapping_ = new java.util.ArrayList<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping>();
cellToColumnMapping_ = new java.util.ArrayList<org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping>();
mutable_bitField0_ |= 0x00000004;
}
cellToColumnMapping_.add(input.readMessage(org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.PARSER, extensionRegistry));
cellToColumnMapping_.add(input.readMessage(org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.PARSER, extensionRegistry));
break;
}
case 34: {
@ -896,14 +896,14 @@ public final class FilterProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_descriptor;
return org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_fieldAccessorTable
return org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.class, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.Builder.class);
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter.class, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter.Builder.class);
}
public static com.google.protobuf.Parser<SQLPredicatePushDownFilter> PARSER =
@ -990,17 +990,17 @@ public final class FilterProtos {
// repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;
public static final int CELL_TO_COLUMN_MAPPING_FIELD_NUMBER = 3;
private java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping> cellToColumnMapping_;
private java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping> cellToColumnMapping_;
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping> getCellToColumnMappingList() {
public java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping> getCellToColumnMappingList() {
return cellToColumnMapping_;
}
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>
public java.util.List<? extends org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>
getCellToColumnMappingOrBuilderList() {
return cellToColumnMapping_;
}
@ -1013,13 +1013,13 @@ public final class FilterProtos {
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping getCellToColumnMapping(int index) {
public org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping getCellToColumnMapping(int index) {
return cellToColumnMapping_.get(index);
}
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder getCellToColumnMappingOrBuilder(
public org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder getCellToColumnMappingOrBuilder(
int index) {
return cellToColumnMapping_.get(index);
}
@ -1154,10 +1154,10 @@ public final class FilterProtos {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter)) {
if (!(obj instanceof org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter other = (org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter) obj;
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter other = (org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter) obj;
boolean result = true;
result = result && (hasDynamicLogicExpression() == other.hasDynamicLogicExpression());
@ -1208,53 +1208,53 @@ public final class FilterProtos {
return hash;
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(byte[] data)
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(java.io.InputStream input)
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseDelimitedFrom(java.io.InputStream input)
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseDelimitedFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parseFrom(
public static org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
@ -1263,7 +1263,7 @@ public final class FilterProtos {
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter prototype) {
public static Builder newBuilder(org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@ -1279,20 +1279,20 @@ public final class FilterProtos {
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilterOrBuilder {
implements org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilterOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_descriptor;
return org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_fieldAccessorTable
return org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.class, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.Builder.class);
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter.class, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter.Builder.class);
}
// Construct using org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.newBuilder()
// Construct using org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
@ -1334,23 +1334,23 @@ public final class FilterProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_descriptor;
return org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.internal_static_hbase_pb_SQLPredicatePushDownFilter_descriptor;
}
public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.getDefaultInstance();
public org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter getDefaultInstanceForType() {
return org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter.getDefaultInstance();
}
public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter build() {
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter result = buildPartial();
public org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter build() {
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter buildPartial() {
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter result = new org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter(this);
public org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter buildPartial() {
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter result = new org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
@ -1381,16 +1381,16 @@ public final class FilterProtos {
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter) {
return mergeFrom((org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter)other);
if (other instanceof org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter) {
return mergeFrom((org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter other) {
if (other == org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter.getDefaultInstance()) return this;
public Builder mergeFrom(org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter other) {
if (other == org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter.getDefaultInstance()) return this;
if (other.hasDynamicLogicExpression()) {
bitField0_ |= 0x00000001;
dynamicLogicExpression_ = other.dynamicLogicExpression_;
@ -1459,11 +1459,11 @@ public final class FilterProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter parsedMessage = null;
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownFilter) e.getUnfinishedMessage();
parsedMessage = (org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownFilter) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
@ -1621,22 +1621,22 @@ public final class FilterProtos {
}
// repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;
private java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping> cellToColumnMapping_ =
private java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping> cellToColumnMapping_ =
java.util.Collections.emptyList();
private void ensureCellToColumnMappingIsMutable() {
if (!((bitField0_ & 0x00000004) == 0x00000004)) {
cellToColumnMapping_ = new java.util.ArrayList<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping>(cellToColumnMapping_);
cellToColumnMapping_ = new java.util.ArrayList<org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping>(cellToColumnMapping_);
bitField0_ |= 0x00000004;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder> cellToColumnMappingBuilder_;
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder> cellToColumnMappingBuilder_;
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping> getCellToColumnMappingList() {
public java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping> getCellToColumnMappingList() {
if (cellToColumnMappingBuilder_ == null) {
return java.util.Collections.unmodifiableList(cellToColumnMapping_);
} else {
@ -1656,7 +1656,7 @@ public final class FilterProtos {
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping getCellToColumnMapping(int index) {
public org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping getCellToColumnMapping(int index) {
if (cellToColumnMappingBuilder_ == null) {
return cellToColumnMapping_.get(index);
} else {
@ -1667,7 +1667,7 @@ public final class FilterProtos {
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public Builder setCellToColumnMapping(
int index, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping value) {
int index, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping value) {
if (cellToColumnMappingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@ -1684,7 +1684,7 @@ public final class FilterProtos {
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public Builder setCellToColumnMapping(
int index, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder builderForValue) {
int index, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder builderForValue) {
if (cellToColumnMappingBuilder_ == null) {
ensureCellToColumnMappingIsMutable();
cellToColumnMapping_.set(index, builderForValue.build());
@ -1697,7 +1697,7 @@ public final class FilterProtos {
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public Builder addCellToColumnMapping(org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping value) {
public Builder addCellToColumnMapping(org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping value) {
if (cellToColumnMappingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@ -1714,7 +1714,7 @@ public final class FilterProtos {
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public Builder addCellToColumnMapping(
int index, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping value) {
int index, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping value) {
if (cellToColumnMappingBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@ -1731,7 +1731,7 @@ public final class FilterProtos {
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public Builder addCellToColumnMapping(
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder builderForValue) {
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder builderForValue) {
if (cellToColumnMappingBuilder_ == null) {
ensureCellToColumnMappingIsMutable();
cellToColumnMapping_.add(builderForValue.build());
@ -1745,7 +1745,7 @@ public final class FilterProtos {
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public Builder addCellToColumnMapping(
int index, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder builderForValue) {
int index, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder builderForValue) {
if (cellToColumnMappingBuilder_ == null) {
ensureCellToColumnMappingIsMutable();
cellToColumnMapping_.add(index, builderForValue.build());
@ -1759,7 +1759,7 @@ public final class FilterProtos {
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public Builder addAllCellToColumnMapping(
java.lang.Iterable<? extends org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping> values) {
java.lang.Iterable<? extends org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping> values) {
if (cellToColumnMappingBuilder_ == null) {
ensureCellToColumnMappingIsMutable();
super.addAll(values, cellToColumnMapping_);
@ -1798,14 +1798,14 @@ public final class FilterProtos {
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder getCellToColumnMappingBuilder(
public org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder getCellToColumnMappingBuilder(
int index) {
return getCellToColumnMappingFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder getCellToColumnMappingOrBuilder(
public org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder getCellToColumnMappingOrBuilder(
int index) {
if (cellToColumnMappingBuilder_ == null) {
return cellToColumnMapping_.get(index); } else {
@ -1815,7 +1815,7 @@ public final class FilterProtos {
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>
public java.util.List<? extends org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>
getCellToColumnMappingOrBuilderList() {
if (cellToColumnMappingBuilder_ != null) {
return cellToColumnMappingBuilder_.getMessageOrBuilderList();
@ -1826,31 +1826,31 @@ public final class FilterProtos {
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder addCellToColumnMappingBuilder() {
public org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder addCellToColumnMappingBuilder() {
return getCellToColumnMappingFieldBuilder().addBuilder(
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.getDefaultInstance());
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder addCellToColumnMappingBuilder(
public org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder addCellToColumnMappingBuilder(
int index) {
return getCellToColumnMappingFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.getDefaultInstance());
index, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.getDefaultInstance());
}
/**
* <code>repeated .hbase.pb.SQLPredicatePushDownCellToColumnMapping cell_to_column_mapping = 3;</code>
*/
public java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder>
public java.util.List<org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder>
getCellToColumnMappingBuilderList() {
return getCellToColumnMappingFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>
getCellToColumnMappingFieldBuilder() {
if (cellToColumnMappingBuilder_ == null) {
cellToColumnMappingBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder, org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>(
org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder, org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos.SQLPredicatePushDownCellToColumnMappingOrBuilder>(
cellToColumnMapping_,
((bitField0_ & 0x00000004) == 0x00000004),
getParentForChildren(),
@ -1964,17 +1964,17 @@ public final class FilterProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\014Filter.proto\022\010hbase.pb\"h\n\'SQLPredicate" +
"PushDownCellToColumnMapping\022\025\n\rcolumn_fa" +
"mily\030\001 \002(\014\022\021\n\tqualifier\030\002 \002(\014\022\023\n\013column_" +
"name\030\003 \002(\t\"\313\001\n\032SQLPredicatePushDownFilte" +
"r\022 \n\030dynamic_logic_expression\030\001 \002(\t\022\036\n\026v" +
"alue_from_query_array\030\002 \003(\014\022Q\n\026cell_to_c" +
"olumn_mapping\030\003 \003(\01321.hbase.pb.SQLPredic" +
"atePushDownCellToColumnMapping\022\030\n\020encode" +
"rClassName\030\004 \001(\tBH\n0org.apache.hadoop.hb" +
"ase.spark.protobuf.generatedB\014FilterProt",
"osH\001\210\001\001\240\001\001"
"\n\021SparkFilter.proto\022\010hbase.pb\"h\n\'SQLPred" +
"icatePushDownCellToColumnMapping\022\025\n\rcolu" +
"mn_family\030\001 \002(\014\022\021\n\tqualifier\030\002 \002(\014\022\023\n\013co" +
"lumn_name\030\003 \002(\t\"\313\001\n\032SQLPredicatePushDown" +
"Filter\022 \n\030dynamic_logic_expression\030\001 \002(\t" +
"\022\036\n\026value_from_query_array\030\002 \003(\014\022Q\n\026cell" +
"_to_column_mapping\030\003 \003(\01321.hbase.pb.SQLP" +
"redicatePushDownCellToColumnMapping\022\030\n\020e" +
"ncoderClassName\030\004 \001(\tBM\n0org.apache.hado" +
"op.hbase.spark.protobuf.generatedB\021Spark",
"FilterProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {


@ -16,11 +16,12 @@
* limitations under the License.
*/
// This file contains protocol buffers that are used for filters
// This file contains protocol buffers that are used for Spark filters
// over in the hbase-spark module
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.spark.protobuf.generated";
option java_outer_classname = "FilterProtos";
option java_outer_classname = "SparkFilterProtos";
option java_generic_services = true;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;


@ -186,6 +186,11 @@
<artifactId>hbase-common</artifactId>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-protocol</artifactId>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-annotations</artifactId>
@ -334,56 +339,6 @@
<surefire.skipSecondPart>true</surefire.skipSecondPart>
</properties>
</profile>
<profile>
<id>compile-protobuf</id>
<activation>
<property>
<name>compile-protobuf</name>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
<id>compile-protoc</id>
<phase>generate-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<imports>
<param>${basedir}/src/main/resources/org/apache/hadoop/hbase/rest/protobuf
</param>
</imports>
<source>
<!-- These should be under src/main/protobuf -->
<directory>${basedir}/src/main/resources/org/apache/hadoop/hbase/rest/protobuf
</directory>
<includes>
<include>CellMessage.proto</include>
<include>CellSetMessage.proto</include>
<include>ColumnSchemaMessage.proto</include>
<include>NamespacePropertiesMessage.proto</include>
<include>NamespacesMessage.proto</include>
<include>ScannerMessage.proto</include>
<include>StorageClusterStatusMessage.proto</include>
<include>TableInfoMessage.proto</include>
<include>TableListMessage.proto</include>
<include>TableSchemaMessage.proto</include>
<include>VersionMessage.proto</include>
</includes>
</source>
<output>${basedir}/src/main/java/</output>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!-- Hadoop-specific dependencies -->
<!-- profile for building against Hadoop 2.0.x
This is the default.


@ -859,50 +859,5 @@
</plugins>
</build>
</profile>
<profile>
<id>compile-protobuf</id>
<activation>
<property>
<name>compile-protobuf</name>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
<id>compile-test-protoc</id>
<phase>generate-test-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<imports>
<param>${basedir}/src/test/protobuf</param>
</imports>
<source>
<directory>${basedir}/src/test/protobuf</directory>
<includes>
<include>ColumnAggregationNullResponseProtocol.proto</include>
<include>ColumnAggregationProtocol.proto</include>
<include>ColumnAggregationWithErrorsProtocol.proto</include>
<include>DummyRegionServerEndpoint.proto</include>
<include>IncrementCounterProcessor.proto</include>
<include>PingProtocol.proto</include>
<include>TestProcedure.proto</include>
<include>test.proto</include>
<include>test_rpc_service.proto</include>
</includes>
</source>
<output>${basedir}/src/test/java/</output>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>


@ -663,43 +663,5 @@
<surefire.skipSecondPart>true</surefire.skipSecondPart>
</properties>
</profile>
<profile>
<id>compile-protobuf</id>
<activation>
<property>
<name>compile-protobuf</name>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
<id>compile-protoc</id>
<phase>generate-sources</phase>
<goals>
<goal>protoc</goal>
</goals>
<configuration>
<imports>
<param>${basedir}/src/main/protobuf</param>
</imports>
<source>
<directory>${basedir}/src/main/protobuf</directory>
<includes>
<include>Filter.proto</include>
</includes>
</source>
<!--<output>${project.build.directory}/generated-sources/java</output>-->
<output>${basedir}/src/main/java/</output>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>


@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.spark.datasources.BytesEncoder;
import org.apache.hadoop.hbase.spark.datasources.JavaBytesEncoder;
import org.apache.hadoop.hbase.spark.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.spark.protobuf.generated.SparkFilterProtos;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.sql.datasources.hbase.Field;
@ -183,9 +183,9 @@ public class SparkSQLPushDownFilter extends FilterBase{
public static SparkSQLPushDownFilter parseFrom(final byte[] pbBytes)
throws DeserializationException {
FilterProtos.SQLPredicatePushDownFilter proto;
SparkFilterProtos.SQLPredicatePushDownFilter proto;
try {
proto = FilterProtos.SQLPredicatePushDownFilter.parseFrom(pbBytes);
proto = SparkFilterProtos.SQLPredicatePushDownFilter.parseFrom(pbBytes);
} catch (InvalidProtocolBufferException e) {
throw new DeserializationException(e);
}
@ -208,7 +208,7 @@ public class SparkSQLPushDownFilter extends FilterBase{
HashMap<ByteArrayComparable, HashMap<ByteArrayComparable, String>>
currentCellToColumnIndexMap = new HashMap<>();
for (FilterProtos.SQLPredicatePushDownCellToColumnMapping
for (SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping
sqlPredicatePushDownCellToColumnMapping :
proto.getCellToColumnMappingList()) {
@ -242,11 +242,11 @@ public class SparkSQLPushDownFilter extends FilterBase{
*/
public byte[] toByteArray() {
FilterProtos.SQLPredicatePushDownFilter.Builder builder =
FilterProtos.SQLPredicatePushDownFilter.newBuilder();
SparkFilterProtos.SQLPredicatePushDownFilter.Builder builder =
SparkFilterProtos.SQLPredicatePushDownFilter.newBuilder();
FilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder columnMappingBuilder =
FilterProtos.SQLPredicatePushDownCellToColumnMapping.newBuilder();
SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.Builder columnMappingBuilder =
SparkFilterProtos.SQLPredicatePushDownCellToColumnMapping.newBuilder();
builder.setDynamicLogicExpression(dynamicLogicExpression.toExpressionString());
for (byte[] valueFromQuery: valueFromQueryArray) {


@ -351,6 +351,10 @@ mvn -Dhadoop.profile=22 ...
You may need to change the protobuf definitions that reside in the _hbase-protocol_ module or other modules.
Prior to hbase-2.0.0, protobuf definition files were sprinkled across all hbase modules, but now everything
to do with protobuf must reside in the hbase-protocol module; we are trying to contain our protobuf
use so we can freely change versions without upsetting any downstream project use of protobuf.
The protobuf files are located in _hbase-protocol/src/main/protobuf_.
For the change to be effective, you will need to regenerate the classes.
You can use maven profile `compile-protobuf` to do this.
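A minimal sketch of that regeneration step (assuming the compile-protobuf profile shown in the poms above, and assuming protoc.path is the property the build exposes for locating the protoc binary; the /opt/local/bin/protoc path is only an example) is to run something like the following from the affected module, then check in the regenerated classes:

    # regenerate the checked-in protobuf classes
    mvn compile -Dcompile-protobuf
    # optionally point the build at a specific locally built protoc
    mvn compile -Dcompile-protobuf -Dprotoc.path=/opt/local/bin/protoc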