From ee70b1d2e05f4b007ffbf7698b192b44c986c8ae Mon Sep 17 00:00:00 2001
From: Michael Stack
Date: Wed, 5 Jul 2017 12:06:29 -0700
Subject: [PATCH] HBASE-17056 Remove checked in PB generated files

Selective add of dependency on hbase-thirdparty jars. Update to READMEs
on how protobuf is done (and update to refguide). Removed all checked in
generated protobuf files. They are generated on the fly now as part of
mainline build.
---
 hbase-client/pom.xml | 4 +
 hbase-endpoint/README.txt | 25 +-
 hbase-endpoint/pom.xml | 258 +-
 .../example/generated/BulkDeleteProtos.java | 1792 -
 .../generated/ColumnAggregationProtos.java | 1277 -
 .../ColumnAggregationWithErrorsProtos.java | 1290 -
 ...lumnAggregationWithNullResponseProtos.java | 1283 -
 .../DummyRegionServerEndpointProtos.java | 1225 -
 .../IncrementCounterProcessorTestProtos.java | 4059 -
 .../protobuf/generated/AggregateProtos.java | 2375 -
 .../generated/SecureBulkLoadProtos.java | 2088 -
 hbase-examples/README.txt | 27 +-
 hbase-examples/pom.xml | 320 +-
 .../example/generated/ExampleProtos.java | 1149 -
 hbase-procedure/pom.xml | 4 +
 hbase-protocol-shaded/README.txt | 49 +-
 hbase-protocol-shaded/pom.xml | 325 +-
 .../com/google/protobuf/AbstractMessage.java | 646 -
 .../google/protobuf/AbstractMessageLite.java | 383 -
 .../com/google/protobuf/AbstractParser.java | 258 -
 .../google/protobuf/AbstractProtobufList.java | 180 -
 .../hbase/shaded/com/google/protobuf/Any.java | 899 -
 .../com/google/protobuf/AnyOrBuilder.java | 70 -
 .../shaded/com/google/protobuf/AnyProto.java | 59 -
 .../hbase/shaded/com/google/protobuf/Api.java | 2473 -
 .../com/google/protobuf/ApiOrBuilder.java | 258 -
 .../shaded/com/google/protobuf/ApiProto.java | 98 -
 .../google/protobuf/BlockingRpcChannel.java | 51 -
 .../com/google/protobuf/BlockingService.java | 64 -
 .../shaded/com/google/protobuf/BoolValue.java | 452 -
 .../google/protobuf/BoolValueOrBuilder.java | 18 -
 .../com/google/protobuf/BooleanArrayList.java | 272 -
 .../com/google/protobuf/ByteBufferWriter.java | 185 -
 .../shaded/com/google/protobuf/ByteInput.java | 81 -
 .../google/protobuf/ByteInputByteString.java | 251 -
 .../com/google/protobuf/ByteOutput.java | 116 -
 .../com/google/protobuf/ByteString.java | 1565 -
 .../com/google/protobuf/BytesValue.java | 454 -
 .../google/protobuf/BytesValueOrBuilder.java | 18 -
 .../com/google/protobuf/CodedInputStream.java | 3549 -
 .../google/protobuf/CodedOutputStream.java | 3001 -
 .../com/google/protobuf/DescriptorProtos.java | 39141 --------
 .../com/google/protobuf/Descriptors.java | 2547 -
 .../com/google/protobuf/DoubleArrayList.java | 273 -
 .../com/google/protobuf/DoubleValue.java | 454 -
 .../google/protobuf/DoubleValueOrBuilder.java | 18 -
 .../shaded/com/google/protobuf/Duration.java | 618 -
 .../google/protobuf/DurationOrBuilder.java | 33 -
 .../com/google/protobuf/DurationProto.java | 59 -
 .../com/google/protobuf/DynamicMessage.java | 684 -
 .../shaded/com/google/protobuf/Empty.java | 386 -
 .../com/google/protobuf/EmptyOrBuilder.java | 9 -
 .../com/google/protobuf/EmptyProto.java | 58 -
 .../shaded/com/google/protobuf/Enum.java | 1745 -
 .../com/google/protobuf/EnumOrBuilder.java | 157 -
 .../shaded/com/google/protobuf/EnumValue.java | 1044 -
 .../google/protobuf/EnumValueOrBuilder.java | 80 -
 .../com/google/protobuf/ExperimentalApi.java | 66 -
 .../shaded/com/google/protobuf/Extension.java | 86 -
 .../com/google/protobuf/ExtensionLite.java | 63 -
 .../google/protobuf/ExtensionRegistry.java | 396 -
 .../protobuf/ExtensionRegistryFactory.java | 95 -
 .../protobuf/ExtensionRegistryLite.java | 227 -
.../shaded/com/google/protobuf/Field.java | 2450 - .../shaded/com/google/protobuf/FieldMask.java | 903 - .../google/protobuf/FieldMaskOrBuilder.java | 44 - .../com/google/protobuf/FieldMaskProto.java | 59 - .../com/google/protobuf/FieldOrBuilder.java | 189 - .../shaded/com/google/protobuf/FieldSet.java | 909 - .../com/google/protobuf/FloatArrayList.java | 272 - .../com/google/protobuf/FloatValue.java | 454 - .../google/protobuf/FloatValueOrBuilder.java | 18 - .../com/google/protobuf/GeneratedMessage.java | 3047 - .../google/protobuf/GeneratedMessageLite.java | 2419 - .../google/protobuf/GeneratedMessageV3.java | 2838 - .../com/google/protobuf/Int32Value.java | 451 - .../google/protobuf/Int32ValueOrBuilder.java | 18 - .../com/google/protobuf/Int64Value.java | 452 - .../google/protobuf/Int64ValueOrBuilder.java | 18 - .../com/google/protobuf/IntArrayList.java | 272 - .../shaded/com/google/protobuf/Internal.java | 751 - .../InvalidProtocolBufferException.java | 146 - .../shaded/com/google/protobuf/LazyField.java | 154 - .../com/google/protobuf/LazyFieldLite.java | 437 - .../google/protobuf/LazyStringArrayList.java | 423 - .../com/google/protobuf/LazyStringList.java | 174 - .../shaded/com/google/protobuf/ListValue.java | 814 - .../google/protobuf/ListValueOrBuilder.java | 53 - .../com/google/protobuf/LongArrayList.java | 272 - .../shaded/com/google/protobuf/MapEntry.java | 449 - .../com/google/protobuf/MapEntryLite.java | 226 - .../shaded/com/google/protobuf/MapField.java | 624 - .../com/google/protobuf/MapFieldLite.java | 224 - .../shaded/com/google/protobuf/Message.java | 292 - .../com/google/protobuf/MessageLite.java | 341 - .../google/protobuf/MessageLiteOrBuilder.java | 60 - .../google/protobuf/MessageLiteToString.java | 239 - .../com/google/protobuf/MessageOrBuilder.java | 143 - .../google/protobuf/MessageReflection.java | 990 - .../shaded/com/google/protobuf/Method.java | 1541 - .../com/google/protobuf/MethodOrBuilder.java | 142 - .../shaded/com/google/protobuf/Mixin.java | 812 - .../com/google/protobuf/MixinOrBuilder.java | 47 - .../com/google/protobuf/MutabilityOracle.java | 48 - .../com/google/protobuf/NioByteString.java | 291 - .../shaded/com/google/protobuf/NullValue.java | 108 - .../shaded/com/google/protobuf/Option.java | 818 - .../com/google/protobuf/OptionOrBuilder.java | 67 - .../shaded/com/google/protobuf/Parser.java | 272 - .../google/protobuf/ProtobufArrayList.java | 105 - .../google/protobuf/ProtocolMessageEnum.java | 59 - .../google/protobuf/ProtocolStringList.java | 48 - .../google/protobuf/RepeatedFieldBuilder.java | 708 - .../protobuf/RepeatedFieldBuilderV3.java | 708 - .../com/google/protobuf/RopeByteString.java | 897 - .../com/google/protobuf/RpcCallback.java | 47 - .../com/google/protobuf/RpcChannel.java | 71 - .../com/google/protobuf/RpcController.java | 118 - .../shaded/com/google/protobuf/RpcUtil.java | 136 - .../shaded/com/google/protobuf/Service.java | 117 - .../com/google/protobuf/ServiceException.java | 52 - .../google/protobuf/SingleFieldBuilder.java | 241 - .../google/protobuf/SingleFieldBuilderV3.java | 241 - .../com/google/protobuf/SmallSortedMap.java | 673 - .../com/google/protobuf/SourceContext.java | 539 - .../protobuf/SourceContextOrBuilder.java | 29 - .../google/protobuf/SourceContextProto.java | 60 - .../com/google/protobuf/StringValue.java | 532 - .../google/protobuf/StringValueOrBuilder.java | 27 - .../shaded/com/google/protobuf/Struct.java | 705 - .../com/google/protobuf/StructOrBuilder.java | 63 - .../com/google/protobuf/StructProto.java | 103 - 
.../shaded/com/google/protobuf/Syntax.java | 123 - .../com/google/protobuf/TextFormat.java | 2108 - .../google/protobuf/TextFormatEscaper.java | 137 - .../protobuf/TextFormatParseInfoTree.java | 226 - .../protobuf/TextFormatParseLocation.java | 104 - .../shaded/com/google/protobuf/Timestamp.java | 616 - .../google/protobuf/TimestampOrBuilder.java | 32 - .../com/google/protobuf/TimestampProto.java | 59 - .../shaded/com/google/protobuf/Type.java | 1967 - .../com/google/protobuf/TypeOrBuilder.java | 192 - .../shaded/com/google/protobuf/TypeProto.java | 141 - .../com/google/protobuf/UInt32Value.java | 451 - .../google/protobuf/UInt32ValueOrBuilder.java | 18 - .../com/google/protobuf/UInt64Value.java | 452 - .../google/protobuf/UInt64ValueOrBuilder.java | 18 - .../UninitializedMessageException.java | 99 - .../com/google/protobuf/UnknownFieldSet.java | 1042 - .../google/protobuf/UnknownFieldSetLite.java | 432 - .../protobuf/UnmodifiableLazyStringList.java | 210 - .../google/protobuf/UnsafeByteOperations.java | 131 - .../com/google/protobuf/UnsafeUtil.java | 295 - .../shaded/com/google/protobuf/Utf8.java | 1764 - .../shaded/com/google/protobuf/Value.java | 1410 - .../com/google/protobuf/ValueOrBuilder.java | 98 - .../com/google/protobuf/WireFormat.java | 260 - .../com/google/protobuf/WrappersProto.java | 153 - .../protobuf/compiler/PluginProtos.java | 5424 -- .../generated/TestProcedureProtos.java | 608 - .../ipc/protobuf/generated/TestProtos.java | 2987 - .../generated/TestRpcServiceProtos.java | 575 - .../generated/AccessControlProtos.java | 11171 --- .../protobuf/generated/AdminProtos.java | 28175 ------ .../protobuf/generated/BackupProtos.java | 7013 -- .../shaded/protobuf/generated/CellProtos.java | 2287 - .../protobuf/generated/ClientProtos.java | 42367 -------- .../protobuf/generated/ClusterIdProtos.java | 668 - .../generated/ClusterStatusProtos.java | 15191 --- .../protobuf/generated/ComparatorProtos.java | 5719 -- .../protobuf/generated/EncryptionProtos.java | 980 - .../generated/ErrorHandlingProtos.java | 2997 - .../shaded/protobuf/generated/FSProtos.java | 1311 - .../protobuf/generated/FilterProtos.java | 18739 ---- .../protobuf/generated/HBaseProtos.java | 19346 ---- .../protobuf/generated/HFileProtos.java | 2449 - .../generated/LoadBalancerProtos.java | 524 - .../protobuf/generated/LockServiceProtos.java | 7595 -- .../protobuf/generated/MapReduceProtos.java | 1804 - .../generated/MasterProcedureProtos.java | 32136 ------ .../protobuf/generated/MasterProtos.java | 80755 ---------------- .../protobuf/generated/ProcedureProtos.java | 7844 -- .../protobuf/generated/QuotaProtos.java | 14525 --- .../shaded/protobuf/generated/RPCProtos.java | 8299 -- .../generated/RegionNormalizerProtos.java | 524 - .../generated/RegionServerStatusProtos.java | 11621 --- .../protobuf/generated/ReplicationProtos.java | 12909 --- .../protobuf/generated/SnapshotProtos.java | 6491 -- .../protobuf/generated/TracingProtos.java | 628 - .../shaded/protobuf/generated/WALProtos.java | 12308 --- .../protobuf/generated/ZooKeeperProtos.java | 4717 - hbase-protocol/README.txt | 16 +- hbase-protocol/pom.xml | 266 +- .../protobuf/generated/PingProtos.java | 4890 - .../generated/TestProcedureProtos.java | 530 - .../ipc/protobuf/generated/TestProtos.java | 2778 - .../generated/TestRpcServiceProtos.java | 568 - .../generated/AccessControlProtos.java | 10656 -- .../hbase/protobuf/generated/AdminProtos.java | 24326 ----- .../generated/AuthenticationProtos.java | 4659 - .../hbase/protobuf/generated/CellProtos.java | 2243 - 
.../protobuf/generated/ClientProtos.java | 41410 -------- .../protobuf/generated/ClusterIdProtos.java | 628 - .../generated/ClusterStatusProtos.java | 14975 --- .../protobuf/generated/ComparatorProtos.java | 5419 -- .../protobuf/generated/EncryptionProtos.java | 954 - .../generated/ErrorHandlingProtos.java | 2895 - .../hbase/protobuf/generated/FSProtos.java | 1235 - .../protobuf/generated/FilterProtos.java | 17809 ---- .../hbase/protobuf/generated/HBaseProtos.java | 19326 ---- .../hbase/protobuf/generated/HFileProtos.java | 2403 - .../generated/LoadBalancerProtos.java | 485 - .../protobuf/generated/MapReduceProtos.java | 1737 - .../generated/MultiRowMutationProtos.java | 2458 - .../hbase/protobuf/generated/QuotaProtos.java | 7342 -- .../hbase/protobuf/generated/RPCProtos.java | 6299 -- .../generated/RowProcessorProtos.java | 1701 - .../protobuf/generated/SnapshotProtos.java | 4789 - .../hbase/protobuf/generated/TableProtos.java | 607 - .../protobuf/generated/TracingProtos.java | 591 - .../generated/VisibilityLabelsProtos.java | 6649 -- .../hbase/protobuf/generated/WALProtos.java | 12109 --- .../protobuf/generated/ZooKeeperProtos.java | 8412 -- hbase-rest/README.txt | 21 +- hbase-rest/pom.xml | 67 +- .../rest/protobuf/generated/CellMessage.java | 731 - .../protobuf/generated/CellSetMessage.java | 1521 - .../generated/ColumnSchemaMessage.java | 1904 - .../generated/NamespacePropertiesMessage.java | 1394 - .../protobuf/generated/NamespacesMessage.java | 547 - .../protobuf/generated/ScannerMessage.java | 1578 - .../StorageClusterStatusMessage.java | 3955 - .../protobuf/generated/TableInfoMessage.java | 1802 - .../protobuf/generated/TableListMessage.java | 547 - .../generated/TableSchemaMessage.java | 2125 - .../protobuf/generated/VersionMessage.java | 1147 - hbase-rsgroup/README.txt | 27 +- hbase-rsgroup/pom.xml | 68 +- .../generated/RSGroupAdminProtos.java | 13571 --- .../protobuf/generated/RSGroupProtos.java | 1332 - hbase-server/pom.xml | 12 + hbase-spark/README.txt | 32 +- hbase-spark/pom.xml | 1275 +- .../protobuf/generated/SparkFilterProtos.java | 2006 - pom.xml | 17 +- src/main/asciidoc/_chapters/protobuf.adoc | 10 +- 246 files changed, 1222 insertions(+), 742097 deletions(-) delete mode 100644 hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java delete mode 100644 hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java delete mode 100644 hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithErrorsProtos.java delete mode 100644 hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java delete mode 100644 hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/DummyRegionServerEndpointProtos.java delete mode 100644 hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/IncrementCounterProcessorTestProtos.java delete mode 100644 hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java delete mode 100644 hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java delete mode 100644 hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessage.java delete mode 100644 
hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessageLite.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractParser.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractProtobufList.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Any.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AnyOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AnyProto.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Api.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ApiOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ApiProto.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BlockingRpcChannel.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BlockingService.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BoolValue.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BoolValueOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BooleanArrayList.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteBufferWriter.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInput.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInputByteString.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteOutput.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteString.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BytesValue.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BytesValueOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedOutputStream.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DescriptorProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Descriptors.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleArrayList.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValue.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValueOrBuilder.java delete mode 100644 
hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Duration.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationProto.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DynamicMessage.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Empty.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EmptyOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EmptyProto.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Enum.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EnumOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EnumValue.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EnumValueOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ExperimentalApi.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Extension.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ExtensionLite.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ExtensionRegistry.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ExtensionRegistryFactory.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ExtensionRegistryLite.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Field.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FieldMask.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FieldMaskOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FieldMaskProto.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FieldOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FieldSet.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FloatArrayList.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FloatValue.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FloatValueOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessage.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessageLite.java delete mode 100644 
hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessageV3.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32Value.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32ValueOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64Value.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64ValueOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/IntArrayList.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Internal.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/InvalidProtocolBufferException.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LazyField.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LazyFieldLite.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LazyStringArrayList.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LazyStringList.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ListValue.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ListValueOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LongArrayList.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapEntry.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapEntryLite.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapField.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapFieldLite.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Message.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MessageLite.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MessageLiteOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MessageLiteToString.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MessageOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MessageReflection.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Method.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MethodOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Mixin.java delete mode 100644 
hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MixinOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MutabilityOracle.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/NioByteString.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/NullValue.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Option.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/OptionOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Parser.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ProtobufArrayList.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ProtocolMessageEnum.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ProtocolStringList.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RepeatedFieldBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RepeatedFieldBuilderV3.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RopeByteString.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RpcCallback.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RpcChannel.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RpcController.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RpcUtil.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Service.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ServiceException.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilderV3.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SmallSortedMap.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContext.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContextOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContextProto.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValue.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValueOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Struct.java delete mode 100644 
hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StructOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StructProto.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Syntax.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/TextFormat.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/TextFormatEscaper.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/TextFormatParseInfoTree.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/TextFormatParseLocation.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Timestamp.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/TimestampOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/TimestampProto.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Type.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/TypeOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/TypeProto.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32Value.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32ValueOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64Value.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64ValueOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UninitializedMessageException.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnknownFieldSet.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnknownFieldSetLite.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnmodifiableLazyStringList.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnsafeByteOperations.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnsafeUtil.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Utf8.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Value.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ValueOrBuilder.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/WireFormat.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/WrappersProto.java delete mode 100644 
hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/PluginProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestRpcServiceProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/BackupProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FSProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LoadBalancerProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LockServiceProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java delete mode 100644 
hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ReplicationProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java delete mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/PingProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProcedureProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestRpcServiceProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AccessControlProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AuthenticationProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/CellProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterIdProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClusterStatusProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ComparatorProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/EncryptionProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ErrorHandlingProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FSProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/FilterProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HFileProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/LoadBalancerProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MapReduceProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MultiRowMutationProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/QuotaProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RowProcessorProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SnapshotProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/TableProtos.java delete mode 100644 
hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/TracingProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/VisibilityLabelsProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java delete mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ZooKeeperProtos.java delete mode 100644 hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java delete mode 100644 hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellSetMessage.java delete mode 100644 hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ColumnSchemaMessage.java delete mode 100644 hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/NamespacePropertiesMessage.java delete mode 100644 hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/NamespacesMessage.java delete mode 100644 hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java delete mode 100644 hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java delete mode 100644 hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java delete mode 100644 hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableListMessage.java delete mode 100644 hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java delete mode 100644 hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java delete mode 100644 hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupAdminProtos.java delete mode 100644 hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RSGroupProtos.java delete mode 100644 hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/protobuf/generated/SparkFilterProtos.java diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml index 3b072c4f813..df94cf98a95 100644 --- a/hbase-client/pom.xml +++ b/hbase-client/pom.xml @@ -87,6 +87,10 @@ + + org.apache.hbase.thirdparty + hbase-shaded-protobuf + org.apache.hbase diff --git a/hbase-endpoint/README.txt b/hbase-endpoint/README.txt index 861a77681bf..2d2cb4b3604 100644 --- a/hbase-endpoint/README.txt +++ b/hbase-endpoint/README.txt @@ -1,24 +1,13 @@ ON PROTOBUFS -This maven module has protobuf definition files ('.protos') used by hbase -Coprocessor Endpoints that ship with hbase core including tests. Coprocessor +This maven module has protobuf definition files ('.protos') used by hbase +Coprocessor Endpoints that ship with hbase core (including tests). Coprocessor Endpoints are meant to be standalone, independent code not reliant on hbase internals. They define their Service using protobuf. The protobuf version they use can be distinct from that used by HBase internally since HBase started shading its protobuf references. Endpoints have no access to the shaded protobuf -hbase uses. They do have access to the content of hbase-protocol but avoid using -as much of this as you can as it is liable to change. +hbase uses. They do have access to the content of hbase-protocol -- the +.protos found in this module -- but avoid using as much of this as you can as it is +liable to change. -Generation of java files from protobuf .proto files included here is done apart -from the build. 
Run the generation whenever you make changes to the .orotos files -and then check in the produced java (The reasoning is that change is infrequent -so why pay the price of generating files anew on each build. - -To generate java files from protos run: - - $ mvn compile -Dcompile-protobuf -or - $ mvn compile -Pcompile-protobuf - -After you've done the above, check it and then check in changes (or post a patch -on a JIRA with your definition file changes and the generated files). Be careful -to notice new files and files removed and do appropriate git rm/adds. +Generation of java files from protobuf .proto files included here is done as +part of the build. diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml index c031d231a7f..5ac17353097 100644 --- a/hbase-endpoint/pom.xml +++ b/hbase-endpoint/pom.xml @@ -1,6 +1,6 @@ - - 4.0.0 - - hbase - org.apache.hbase - 2.0.0-alpha-2-SNAPSHOT - .. - - - hbase-endpoint - Apache HBase - Coprocessor Endpoint - HBase Coprocessor Endpoint implementations - - - - true - - - - - - org.apache.maven.plugins - maven-site-plugin - - true - - - - - org.apache.maven.plugins - maven-source-plugin - + 4.0.0 + + hbase + org.apache.hbase + 2.0.0-alpha-2-SNAPSHOT + .. + + hbase-endpoint + Apache HBase - Coprocessor Endpoint + HBase Coprocessor Endpoint implementations + + + true + + + + + org.apache.maven.plugins + maven-site-plugin + + true + + + + + org.apache.maven.plugins + maven-source-plugin + maven-assembly-plugin @@ -57,38 +54,56 @@ true + + org.xolstice.maven.plugins + protobuf-maven-plugin + + + compile-protoc + generate-sources + + compile + + + + ${basedir}/../hbase-protocol/src/main/protobuf + + + + + + + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + org.apache.hadoop + hadoop-maven-plugins + [2.0.5-alpha,) + + protoc + + + + + + + + + + - - - - - org.eclipse.m2e - lifecycle-mapping - - - - - - org.apache.hadoop - hadoop-maven-plugins - [2.0.5-alpha,) - - protoc - - - - - - - - - - - - - - - + + + org.apache.hbase.thirdparty hbase-shaded-miscellaneous @@ -131,88 +146,57 @@ test-jar test - - org.apache.hbase - hbase-protocol - - - org.apache.hbase - hbase-client - - - - org.apache.hbase - hbase-server - - - org.apache.hbase - hbase-server - test-jar - test - - - - commons-logging - commons-logging - - - - - - - skipRpcTests - - - skipRpcTests - - - - true - true - - - - compile-protobuf - - - compile-protobuf - - - - - - org.xolstice.maven.plugins - protobuf-maven-plugin - - - compile-protoc - generate-sources - - compile - - - - ${basedir}/../hbase-protocol/src/main/protobuf - - - - - - - - + + org.apache.hbase + hbase-server + + + org.apache.hbase + hbase-server + test-jar + test + + + + commons-logging + commons-logging + + + + + + skipRpcTests + + + skipRpcTests + + + + true + true + + - hadoop-2.0 - - !hadoop.profile + + + !hadoop.profile @@ -315,5 +299,5 @@ - + diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java deleted file mode 100644 index 373e03629a7..00000000000 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java +++ /dev/null @@ -1,1792 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: BulkDelete.proto - -package org.apache.hadoop.hbase.coprocessor.example.generated; - -public final class BulkDeleteProtos { - private BulkDeleteProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface BulkDeleteRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .hbase.pb.Scan scan = 1; - /** - * required .hbase.pb.Scan scan = 1; - */ - boolean hasScan(); - /** - * required .hbase.pb.Scan scan = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); - /** - * required .hbase.pb.Scan scan = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); - - // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - boolean hasDeleteType(); - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType(); - - // optional uint64 timestamp = 3; - /** - * optional uint64 timestamp = 3; - */ - boolean hasTimestamp(); - /** - * optional uint64 timestamp = 3; - */ - long getTimestamp(); - - // required uint32 rowBatchSize = 4; - /** - * required uint32 rowBatchSize = 4; - */ - boolean hasRowBatchSize(); - /** - * required uint32 rowBatchSize = 4; - */ - int getRowBatchSize(); - } - /** - * Protobuf type {@code hbase.pb.BulkDeleteRequest} - */ - public static final class BulkDeleteRequest extends - com.google.protobuf.GeneratedMessage - implements BulkDeleteRequestOrBuilder { - // Use BulkDeleteRequest.newBuilder() to construct. - private BulkDeleteRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private BulkDeleteRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final BulkDeleteRequest defaultInstance; - public static BulkDeleteRequest getDefaultInstance() { - return defaultInstance; - } - - public BulkDeleteRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private BulkDeleteRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - subBuilder = scan_.toBuilder(); - } - scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(scan_); - scan_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000001; - break; - } - case 16: { - int rawValue = 
input.readEnum(); - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - deleteType_ = value; - } - break; - } - case 24: { - bitField0_ |= 0x00000004; - timestamp_ = input.readUInt64(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - rowBatchSize_ = input.readUInt32(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public BulkDeleteRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new BulkDeleteRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - /** - * Protobuf enum {@code hbase.pb.BulkDeleteRequest.DeleteType} - */ - public enum DeleteType - implements com.google.protobuf.ProtocolMessageEnum { - /** - * ROW = 0; - */ - ROW(0, 0), - /** - * FAMILY = 1; - */ - FAMILY(1, 1), - /** - * COLUMN = 2; - */ - COLUMN(2, 2), - /** - * VERSION = 3; - */ - VERSION(3, 3), - ; - - /** - * ROW = 0; - */ - public static final int ROW_VALUE = 0; - /** - * FAMILY = 1; - */ - public static final int FAMILY_VALUE = 1; - /** - * COLUMN = 2; - */ - public static final int COLUMN_VALUE = 2; - /** - * VERSION = 3; - */ - public static final int VERSION_VALUE = 3; - - - public final int getNumber() { return value; } - - public static DeleteType valueOf(int value) { - switch (value) { - case 0: return ROW; - case 1: return FAMILY; - case 2: return COLUMN; - case 3: return VERSION; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public DeleteType findValueByNumber(int number) { - return DeleteType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - 
getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDescriptor().getEnumTypes().get(0); - } - - private static final DeleteType[] VALUES = values(); - - public static DeleteType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private DeleteType(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:hbase.pb.BulkDeleteRequest.DeleteType) - } - - private int bitField0_; - // required .hbase.pb.Scan scan = 1; - public static final int SCAN_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; - /** - * required .hbase.pb.Scan scan = 1; - */ - public boolean hasScan() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { - return scan_; - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { - return scan_; - } - - // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - public static final int DELETETYPE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_; - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - public boolean hasDeleteType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() { - return deleteType_; - } - - // optional uint64 timestamp = 3; - public static final int TIMESTAMP_FIELD_NUMBER = 3; - private long timestamp_; - /** - * optional uint64 timestamp = 3; - */ - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional uint64 timestamp = 3; - */ - public long getTimestamp() { - return timestamp_; - } - - // required uint32 rowBatchSize = 4; - public static final int ROWBATCHSIZE_FIELD_NUMBER = 4; - private int rowBatchSize_; - /** - * required uint32 rowBatchSize = 4; - */ - public boolean hasRowBatchSize() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * required uint32 rowBatchSize = 4; - */ - public int getRowBatchSize() { - return rowBatchSize_; - } - - private void initFields() { - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW; - timestamp_ = 0L; - rowBatchSize_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasScan()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasDeleteType()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasRowBatchSize()) { - 
memoizedIsInitialized = 0; - return false; - } - if (!getScan().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, scan_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, deleteType_.getNumber()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt64(3, timestamp_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeUInt32(4, rowBatchSize_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, scan_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, deleteType_.getNumber()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, timestamp_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(4, rowBatchSize_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) obj; - - boolean result = true; - result = result && (hasScan() == other.hasScan()); - if (hasScan()) { - result = result && getScan() - .equals(other.getScan()); - } - result = result && (hasDeleteType() == other.hasDeleteType()); - if (hasDeleteType()) { - result = result && - (getDeleteType() == other.getDeleteType()); - } - result = result && (hasTimestamp() == other.hasTimestamp()); - if (hasTimestamp()) { - result = result && (getTimestamp() - == other.getTimestamp()); - } - result = result && (hasRowBatchSize() == other.hasRowBatchSize()); - if (hasRowBatchSize()) { - result = result && (getRowBatchSize() - == other.getRowBatchSize()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasScan()) { - hash = (37 * hash) + SCAN_FIELD_NUMBER; - hash = (53 * hash) + getScan().hashCode(); - } - if (hasDeleteType()) { - hash = (37 * hash) + DELETETYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getDeleteType()); - } - if (hasTimestamp()) { - hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimestamp()); - } - if 
(hasRowBatchSize()) { - hash = (37 * hash) + ROWBATCHSIZE_FIELD_NUMBER; - hash = (53 * hash) + getRowBatchSize(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code 
hbase.pb.BulkDeleteRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getScanFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - } else { - scanBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW; - bitField0_ = (bitField0_ & ~0x00000002); - timestamp_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - rowBatchSize_ = 0; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest build() { - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest buildPartial() { - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (scanBuilder_ == null) { - result.scan_ = scan_; - } else { - result.scan_ = scanBuilder_.build(); - } - if (((from_bitField0_ & 
0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.deleteType_ = deleteType_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.timestamp_ = timestamp_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.rowBatchSize_ = rowBatchSize_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other) { - if (other == org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance()) return this; - if (other.hasScan()) { - mergeScan(other.getScan()); - } - if (other.hasDeleteType()) { - setDeleteType(other.getDeleteType()); - } - if (other.hasTimestamp()) { - setTimestamp(other.getTimestamp()); - } - if (other.hasRowBatchSize()) { - setRowBatchSize(other.getRowBatchSize()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasScan()) { - - return false; - } - if (!hasDeleteType()) { - - return false; - } - if (!hasRowBatchSize()) { - - return false; - } - if (!getScan().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required .hbase.pb.Scan scan = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; - /** - * required .hbase.pb.Scan scan = 1; - */ - public boolean hasScan() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { - if (scanBuilder_ == null) { - return scan_; - } else { - return scanBuilder_.getMessage(); - } - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { - if (scanBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - scan_ = value; - onChanged(); - } else { - scanBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; 
- return this; - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public Builder setScan( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { - if (scanBuilder_ == null) { - scan_ = builderForValue.build(); - onChanged(); - } else { - scanBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { - if (scanBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { - scan_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); - } else { - scan_ = value; - } - onChanged(); - } else { - scanBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public Builder clearScan() { - if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - onChanged(); - } else { - scanBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getScanFieldBuilder().getBuilder(); - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { - if (scanBuilder_ != null) { - return scanBuilder_.getMessageOrBuilder(); - } else { - return scan_; - } - } - /** - * required .hbase.pb.Scan scan = 1; - */ - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> - getScanFieldBuilder() { - if (scanBuilder_ == null) { - scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( - scan_, - getParentForChildren(), - isClean()); - scan_ = null; - } - return scanBuilder_; - } - - // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW; - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - public boolean hasDeleteType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() { - return deleteType_; - } - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - public Builder setDeleteType(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - deleteType_ = value; - 
onChanged(); - return this; - } - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - public Builder clearDeleteType() { - bitField0_ = (bitField0_ & ~0x00000002); - deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW; - onChanged(); - return this; - } - - // optional uint64 timestamp = 3; - private long timestamp_ ; - /** - * optional uint64 timestamp = 3; - */ - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional uint64 timestamp = 3; - */ - public long getTimestamp() { - return timestamp_; - } - /** - * optional uint64 timestamp = 3; - */ - public Builder setTimestamp(long value) { - bitField0_ |= 0x00000004; - timestamp_ = value; - onChanged(); - return this; - } - /** - * optional uint64 timestamp = 3; - */ - public Builder clearTimestamp() { - bitField0_ = (bitField0_ & ~0x00000004); - timestamp_ = 0L; - onChanged(); - return this; - } - - // required uint32 rowBatchSize = 4; - private int rowBatchSize_ ; - /** - * required uint32 rowBatchSize = 4; - */ - public boolean hasRowBatchSize() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * required uint32 rowBatchSize = 4; - */ - public int getRowBatchSize() { - return rowBatchSize_; - } - /** - * required uint32 rowBatchSize = 4; - */ - public Builder setRowBatchSize(int value) { - bitField0_ |= 0x00000008; - rowBatchSize_ = value; - onChanged(); - return this; - } - /** - * required uint32 rowBatchSize = 4; - */ - public Builder clearRowBatchSize() { - bitField0_ = (bitField0_ & ~0x00000008); - rowBatchSize_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.BulkDeleteRequest) - } - - static { - defaultInstance = new BulkDeleteRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteRequest) - } - - public interface BulkDeleteResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint64 rowsDeleted = 1; - /** - * required uint64 rowsDeleted = 1; - */ - boolean hasRowsDeleted(); - /** - * required uint64 rowsDeleted = 1; - */ - long getRowsDeleted(); - - // optional uint64 versionsDeleted = 2; - /** - * optional uint64 versionsDeleted = 2; - */ - boolean hasVersionsDeleted(); - /** - * optional uint64 versionsDeleted = 2; - */ - long getVersionsDeleted(); - } - /** - * Protobuf type {@code hbase.pb.BulkDeleteResponse} - */ - public static final class BulkDeleteResponse extends - com.google.protobuf.GeneratedMessage - implements BulkDeleteResponseOrBuilder { - // Use BulkDeleteResponse.newBuilder() to construct. 
- private BulkDeleteResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private BulkDeleteResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final BulkDeleteResponse defaultInstance; - public static BulkDeleteResponse getDefaultInstance() { - return defaultInstance; - } - - public BulkDeleteResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private BulkDeleteResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - rowsDeleted_ = input.readUInt64(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - versionsDeleted_ = input.readUInt64(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public BulkDeleteResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new BulkDeleteResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required uint64 rowsDeleted = 1; - public static final int ROWSDELETED_FIELD_NUMBER = 1; - private long rowsDeleted_; - /** - * required uint64 rowsDeleted = 1; - */ - public boolean hasRowsDeleted() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required uint64 rowsDeleted = 1; - */ - public long getRowsDeleted() { - return rowsDeleted_; - } - - // optional uint64 versionsDeleted = 2; - public static final int 
VERSIONSDELETED_FIELD_NUMBER = 2; - private long versionsDeleted_; - /** - * optional uint64 versionsDeleted = 2; - */ - public boolean hasVersionsDeleted() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional uint64 versionsDeleted = 2; - */ - public long getVersionsDeleted() { - return versionsDeleted_; - } - - private void initFields() { - rowsDeleted_ = 0L; - versionsDeleted_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRowsDeleted()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(1, rowsDeleted_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(2, versionsDeleted_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, rowsDeleted_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, versionsDeleted_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) obj; - - boolean result = true; - result = result && (hasRowsDeleted() == other.hasRowsDeleted()); - if (hasRowsDeleted()) { - result = result && (getRowsDeleted() - == other.getRowsDeleted()); - } - result = result && (hasVersionsDeleted() == other.hasVersionsDeleted()); - if (hasVersionsDeleted()) { - result = result && (getVersionsDeleted() - == other.getVersionsDeleted()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRowsDeleted()) { - hash = (37 * hash) + ROWSDELETED_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getRowsDeleted()); - } - if (hasVersionsDeleted()) { - hash = (37 * hash) + VERSIONSDELETED_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getVersionsDeleted()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - com.google.protobuf.ByteString data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.BulkDeleteResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - rowsDeleted_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - versionsDeleted_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse build() { - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse buildPartial() { - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.rowsDeleted_ = rowsDeleted_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.versionsDeleted_ = versionsDeleted_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse other) { - if (other == 
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance()) return this; - if (other.hasRowsDeleted()) { - setRowsDeleted(other.getRowsDeleted()); - } - if (other.hasVersionsDeleted()) { - setVersionsDeleted(other.getVersionsDeleted()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRowsDeleted()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required uint64 rowsDeleted = 1; - private long rowsDeleted_ ; - /** - * required uint64 rowsDeleted = 1; - */ - public boolean hasRowsDeleted() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required uint64 rowsDeleted = 1; - */ - public long getRowsDeleted() { - return rowsDeleted_; - } - /** - * required uint64 rowsDeleted = 1; - */ - public Builder setRowsDeleted(long value) { - bitField0_ |= 0x00000001; - rowsDeleted_ = value; - onChanged(); - return this; - } - /** - * required uint64 rowsDeleted = 1; - */ - public Builder clearRowsDeleted() { - bitField0_ = (bitField0_ & ~0x00000001); - rowsDeleted_ = 0L; - onChanged(); - return this; - } - - // optional uint64 versionsDeleted = 2; - private long versionsDeleted_ ; - /** - * optional uint64 versionsDeleted = 2; - */ - public boolean hasVersionsDeleted() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional uint64 versionsDeleted = 2; - */ - public long getVersionsDeleted() { - return versionsDeleted_; - } - /** - * optional uint64 versionsDeleted = 2; - */ - public Builder setVersionsDeleted(long value) { - bitField0_ |= 0x00000002; - versionsDeleted_ = value; - onChanged(); - return this; - } - /** - * optional uint64 versionsDeleted = 2; - */ - public Builder clearVersionsDeleted() { - bitField0_ = (bitField0_ & ~0x00000002); - versionsDeleted_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.BulkDeleteResponse) - } - - static { - defaultInstance = new BulkDeleteResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteResponse) - } - - /** - * Protobuf service {@code hbase.pb.BulkDeleteService} - */ - public static abstract class BulkDeleteService - implements com.google.protobuf.Service { - protected BulkDeleteService() {} - - public interface Interface { - /** - * rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse); - */ - public abstract void delete( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new BulkDeleteService() { - @java.lang.Override - 
public void delete( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request, - com.google.protobuf.RpcCallback done) { - impl.delete(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.delete(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - /** - * rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse); - */ - public abstract void delete( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.delete(controller, 
(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void delete( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request) - throws 
com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance()); - } - - } - - // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteService) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_BulkDeleteRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_BulkDeleteResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\020BulkDelete.proto\022\010hbase.pb\032\014Client.pro" + - "to\"\322\001\n\021BulkDeleteRequest\022\034\n\004scan\030\001 \002(\0132\016" + - ".hbase.pb.Scan\022:\n\ndeleteType\030\002 \002(\0162&.hba" + - "se.pb.BulkDeleteRequest.DeleteType\022\021\n\tti" + - "mestamp\030\003 \001(\004\022\024\n\014rowBatchSize\030\004 \002(\r\":\n\nD" + - "eleteType\022\007\n\003ROW\020\000\022\n\n\006FAMILY\020\001\022\n\n\006COLUMN" + - "\020\002\022\013\n\007VERSION\020\003\"B\n\022BulkDeleteResponse\022\023\n" + - "\013rowsDeleted\030\001 \002(\004\022\027\n\017versionsDeleted\030\002 " + - "\001(\0042X\n\021BulkDeleteService\022C\n\006delete\022\033.hba" + - "se.pb.BulkDeleteRequest\032\034.hbase.pb.BulkD", - "eleteResponseBQ\n5org.apache.hadoop.hbase" + - ".coprocessor.example.generatedB\020BulkDele" + - "teProtosH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_BulkDeleteRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_BulkDeleteRequest_descriptor, - new java.lang.String[] { "Scan", "DeleteType", "Timestamp", "RowBatchSize", }); - internal_static_hbase_pb_BulkDeleteResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_BulkDeleteResponse_descriptor, - new java.lang.String[] { "RowsDeleted", "VersionsDeleted", }); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java 
b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java deleted file mode 100644 index 61b47ff0aa3..00000000000 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationProtos.java +++ /dev/null @@ -1,1277 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: ColumnAggregationProtocol.proto - -package org.apache.hadoop.hbase.coprocessor.protobuf.generated; - -public final class ColumnAggregationProtos { - private ColumnAggregationProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface SumRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family = 1; - /** - * required bytes family = 1; - */ - boolean hasFamily(); - /** - * required bytes family = 1; - */ - com.google.protobuf.ByteString getFamily(); - - // optional bytes qualifier = 2; - /** - * optional bytes qualifier = 2; - */ - boolean hasQualifier(); - /** - * optional bytes qualifier = 2; - */ - com.google.protobuf.ByteString getQualifier(); - } - /** - * Protobuf type {@code SumRequest} - */ - public static final class SumRequest extends - com.google.protobuf.GeneratedMessage - implements SumRequestOrBuilder { - // Use SumRequest.newBuilder() to construct. - private SumRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SumRequest defaultInstance; - public static SumRequest getDefaultInstance() { - return defaultInstance; - } - - public SumRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private SumRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - qualifier_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SumRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SumRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required bytes family = 1; - public static final int FAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString family_; - /** - * required bytes family = 1; - */ - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes family = 1; - */ - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // optional bytes qualifier = 2; - public static final int QUALIFIER_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString qualifier_; - /** - * optional bytes qualifier = 2; - */ - public boolean hasQualifier() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional bytes qualifier = 2; - */ - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFamily()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, qualifier_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, qualifier_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest other = 
(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest) obj; - - boolean result = true; - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && (hasQualifier() == other.hasQualifier()); - if (hasQualifier()) { - result = result && getQualifier() - .equals(other.getQualifier()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (hasQualifier()) { - hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; - hash = (53 * hash) + getQualifier().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code SumRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - qualifier_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.family_ = family_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.qualifier_ = qualifier_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.getDefaultInstance()) return this; - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (other.hasQualifier()) { - setQualifier(other.getQualifier()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFamily()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required bytes family = 1; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - /** - * required bytes family = 1; - */ - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes family = 1; - */ - public com.google.protobuf.ByteString getFamily() { - return family_; - } - /** - * required bytes family = 1; - */ - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - family_ = value; - onChanged(); - return this; - } - /** - * required bytes family = 1; - */ - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000001); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // optional bytes qualifier = 2; - private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; - /** - * optional bytes qualifier = 2; - */ - public boolean hasQualifier() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional bytes qualifier = 2; - */ - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - /** - * 
optional bytes qualifier = 2; - */ - public Builder setQualifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - qualifier_ = value; - onChanged(); - return this; - } - /** - * optional bytes qualifier = 2; - */ - public Builder clearQualifier() { - bitField0_ = (bitField0_ & ~0x00000002); - qualifier_ = getDefaultInstance().getQualifier(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:SumRequest) - } - - static { - defaultInstance = new SumRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:SumRequest) - } - - public interface SumResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required int64 sum = 1; - /** - * required int64 sum = 1; - */ - boolean hasSum(); - /** - * required int64 sum = 1; - */ - long getSum(); - } - /** - * Protobuf type {@code SumResponse} - */ - public static final class SumResponse extends - com.google.protobuf.GeneratedMessage - implements SumResponseOrBuilder { - // Use SumResponse.newBuilder() to construct. - private SumResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SumResponse defaultInstance; - public static SumResponse getDefaultInstance() { - return defaultInstance; - } - - public SumResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private SumResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - sum_ = input.readInt64(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class, 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SumResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SumResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required int64 sum = 1; - public static final int SUM_FIELD_NUMBER = 1; - private long sum_; - /** - * required int64 sum = 1; - */ - public boolean hasSum() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required int64 sum = 1; - */ - public long getSum() { - return sum_; - } - - private void initFields() { - sum_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasSum()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt64(1, sum_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(1, sum_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse) obj; - - boolean result = true; - result = result && (hasSum() == other.hasSum()); - if (hasSum()) { - result = result && (getSum() - == other.getSum()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasSum()) { - hash = (37 * hash) + SUM_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getSum()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code SumResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - sum_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.sum_ = sum_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDefaultInstance()) return this; - if (other.hasSum()) { - setSum(other.getSum()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasSum()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required int64 sum = 1; - private long sum_ ; - /** - * required int64 sum = 1; - */ - public boolean hasSum() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required int64 sum = 1; - */ - public long getSum() { - return sum_; - } - /** - * required int64 sum = 1; - */ - public Builder setSum(long value) { - bitField0_ |= 0x00000001; - sum_ = value; - onChanged(); - return this; - } - /** - * required int64 sum = 1; - */ - public Builder clearSum() { - bitField0_ = (bitField0_ & ~0x00000001); - sum_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:SumResponse) - } - - static { - defaultInstance = new SumResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:SumResponse) - } - - /** - * Protobuf service {@code ColumnAggregationService} - */ - public static abstract class ColumnAggregationService - implements com.google.protobuf.Service { - protected ColumnAggregationService() {} - - public interface Interface { - /** - * rpc sum(.SumRequest) returns (.SumResponse); - */ - public abstract void sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new ColumnAggregationService() { - @java.lang.Override - public void sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request, - com.google.protobuf.RpcCallback done) { - impl.sum(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.sum(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - 
"Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - /** - * rpc sum(.SumRequest) returns (.SumResponse); - */ - public abstract void sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.sum(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.ColumnAggregationService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDefaultInstance()); - } - - } - - // @@protoc_insertion_point(class_scope:ColumnAggregationService) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_SumRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_SumRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_SumResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_SumResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\037ColumnAggregationProtocol.proto\"/\n\nSum" + - "Request\022\016\n\006family\030\001 \002(\014\022\021\n\tqualifier\030\002 \001" + - "(\014\"\032\n\013SumResponse\022\013\n\003sum\030\001 \002(\0032<\n\030Column" + - "AggregationService\022 \n\003sum\022\013.SumRequest\032\014" + - ".SumResponseBW\n6org.apache.hadoop.hbase." 
+ - "coprocessor.protobuf.generatedB\027ColumnAg" + - "gregationProtos\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_SumRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_SumRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_SumRequest_descriptor, - new java.lang.String[] { "Family", "Qualifier", }); - internal_static_SumResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_SumResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_SumResponse_descriptor, - new java.lang.String[] { "Sum", }); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithErrorsProtos.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithErrorsProtos.java deleted file mode 100644 index 64cf82e4cc3..00000000000 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithErrorsProtos.java +++ /dev/null @@ -1,1290 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: ColumnAggregationWithErrorsProtocol.proto - -package org.apache.hadoop.hbase.coprocessor.protobuf.generated; - -public final class ColumnAggregationWithErrorsProtos { - private ColumnAggregationWithErrorsProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface ColumnAggregationWithErrorsSumRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family = 1; - /** - * required bytes family = 1; - */ - boolean hasFamily(); - /** - * required bytes family = 1; - */ - com.google.protobuf.ByteString getFamily(); - - // optional bytes qualifier = 2; - /** - * optional bytes qualifier = 2; - */ - boolean hasQualifier(); - /** - * optional bytes qualifier = 2; - */ - com.google.protobuf.ByteString getQualifier(); - } - /** - * Protobuf type {@code ColumnAggregationWithErrorsSumRequest} - * - *
-   * use unique names for messages in ColumnAggregationXXX.protos due to a bug in
-   * protoc or hadoop's protoc compiler.
-   * </pre>
- */ - public static final class ColumnAggregationWithErrorsSumRequest extends - com.google.protobuf.GeneratedMessage - implements ColumnAggregationWithErrorsSumRequestOrBuilder { - // Use ColumnAggregationWithErrorsSumRequest.newBuilder() to construct. - private ColumnAggregationWithErrorsSumRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ColumnAggregationWithErrorsSumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ColumnAggregationWithErrorsSumRequest defaultInstance; - public static ColumnAggregationWithErrorsSumRequest getDefaultInstance() { - return defaultInstance; - } - - public ColumnAggregationWithErrorsSumRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ColumnAggregationWithErrorsSumRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - qualifier_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ColumnAggregationWithErrorsSumRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ColumnAggregationWithErrorsSumRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public 
com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required bytes family = 1; - public static final int FAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString family_; - /** - * required bytes family = 1; - */ - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes family = 1; - */ - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // optional bytes qualifier = 2; - public static final int QUALIFIER_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString qualifier_; - /** - * optional bytes qualifier = 2; - */ - public boolean hasQualifier() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional bytes qualifier = 2; - */ - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFamily()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, qualifier_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, qualifier_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest) obj; - - boolean result = true; - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && (hasQualifier() == other.hasQualifier()); - if (hasQualifier()) { - result = result && getQualifier() - .equals(other.getQualifier()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; 
- } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (hasQualifier()) { - hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; - hash = (53 * hash) + getQualifier().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder 
newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code ColumnAggregationWithErrorsSumRequest} - * - *
-     * use unique names for messages in ColumnAggregationXXX.protos due to a bug in
-     * protoc or hadoop's protoc compiler.
-     * </pre>
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - qualifier_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumRequest_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 
0x00000001; - } - result.family_ = family_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.qualifier_ = qualifier_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.getDefaultInstance()) return this; - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (other.hasQualifier()) { - setQualifier(other.getQualifier()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFamily()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required bytes family = 1; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - /** - * required bytes family = 1; - */ - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes family = 1; - */ - public com.google.protobuf.ByteString getFamily() { - return family_; - } - /** - * required bytes family = 1; - */ - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - family_ = value; - onChanged(); - return this; - } - /** - * required bytes family = 1; - */ - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000001); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // optional bytes qualifier = 2; - private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; - /** - * optional bytes qualifier = 2; - */ - public boolean hasQualifier() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional bytes qualifier = 2; - */ - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - /** - * optional bytes qualifier = 2; - */ - public Builder setQualifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - qualifier_ = value; - 
onChanged(); - return this; - } - /** - * optional bytes qualifier = 2; - */ - public Builder clearQualifier() { - bitField0_ = (bitField0_ & ~0x00000002); - qualifier_ = getDefaultInstance().getQualifier(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ColumnAggregationWithErrorsSumRequest) - } - - static { - defaultInstance = new ColumnAggregationWithErrorsSumRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ColumnAggregationWithErrorsSumRequest) - } - - public interface ColumnAggregationWithErrorsSumResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required int64 sum = 1; - /** - * required int64 sum = 1; - */ - boolean hasSum(); - /** - * required int64 sum = 1; - */ - long getSum(); - } - /** - * Protobuf type {@code ColumnAggregationWithErrorsSumResponse} - */ - public static final class ColumnAggregationWithErrorsSumResponse extends - com.google.protobuf.GeneratedMessage - implements ColumnAggregationWithErrorsSumResponseOrBuilder { - // Use ColumnAggregationWithErrorsSumResponse.newBuilder() to construct. - private ColumnAggregationWithErrorsSumResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ColumnAggregationWithErrorsSumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ColumnAggregationWithErrorsSumResponse defaultInstance; - public static ColumnAggregationWithErrorsSumResponse getDefaultInstance() { - return defaultInstance; - } - - public ColumnAggregationWithErrorsSumResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ColumnAggregationWithErrorsSumResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - sum_ = input.readInt64(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ColumnAggregationWithErrorsSumResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ColumnAggregationWithErrorsSumResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required int64 sum = 1; - public static final int SUM_FIELD_NUMBER = 1; - private long sum_; - /** - * required int64 sum = 1; - */ - public boolean hasSum() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required int64 sum = 1; - */ - public long getSum() { - return sum_; - } - - private void initFields() { - sum_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasSum()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt64(1, sum_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(1, sum_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse) obj; - - boolean result = true; - result = result && (hasSum() == other.hasSum()); - if (hasSum()) { - result = result && (getSum() - == other.getSum()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + 
getDescriptorForType().hashCode(); - if (hasSum()) { - hash = (37 * hash) + SUM_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getSum()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code ColumnAggregationWithErrorsSumResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - sum_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumResponse_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse buildPartial() { - 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.sum_ = sum_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance()) return this; - if (other.hasSum()) { - setSum(other.getSum()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasSum()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required int64 sum = 1; - private long sum_ ; - /** - * required int64 sum = 1; - */ - public boolean hasSum() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required int64 sum = 1; - */ - public long getSum() { - return sum_; - } - /** - * required int64 sum = 1; - */ - public Builder setSum(long value) { - bitField0_ |= 0x00000001; - sum_ = value; - onChanged(); - return this; - } - /** - * required int64 sum = 1; - */ - public Builder clearSum() { - bitField0_ = (bitField0_ & ~0x00000001); - sum_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ColumnAggregationWithErrorsSumResponse) - } - - static { - defaultInstance = new ColumnAggregationWithErrorsSumResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ColumnAggregationWithErrorsSumResponse) - } - - /** - * Protobuf service {@code ColumnAggregationServiceWithErrors} - */ - public static abstract class ColumnAggregationServiceWithErrors - implements com.google.protobuf.Service { - protected ColumnAggregationServiceWithErrors() {} - - public interface Interface { - /** - * rpc sum(.ColumnAggregationWithErrorsSumRequest) returns 
(.ColumnAggregationWithErrorsSumResponse); - */ - public abstract void sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new ColumnAggregationServiceWithErrors() { - @java.lang.Override - public void sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request, - com.google.protobuf.RpcCallback done) { - impl.sum(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.sum(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - /** - * rpc sum(.ColumnAggregationWithErrorsSumRequest) returns (.ColumnAggregationWithErrorsSumResponse); - */ - public abstract void sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.sum(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationServiceWithErrors implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.class, - 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance()); - } - - } - - // @@protoc_insertion_point(class_scope:ColumnAggregationServiceWithErrors) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ColumnAggregationWithErrorsSumRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ColumnAggregationWithErrorsSumRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ColumnAggregationWithErrorsSumResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ColumnAggregationWithErrorsSumResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n)ColumnAggregationWithErrorsProtocol.pr" + - "oto\"J\n%ColumnAggregationWithErrorsSumReq" + - "uest\022\016\n\006family\030\001 \002(\014\022\021\n\tqualifier\030\002 \001(\014\"" + - "5\n&ColumnAggregationWithErrorsSumRespons" + - "e\022\013\n\003sum\030\001 \002(\0032|\n\"ColumnAggregationServi" + - "ceWithErrors\022V\n\003sum\022&.ColumnAggregationW" + - "ithErrorsSumRequest\032\'.ColumnAggregationW" + - "ithErrorsSumResponseBa\n6org.apache.hadoo" + - "p.hbase.coprocessor.protobuf.generatedB!" 
+ - "ColumnAggregationWithErrorsProtos\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_ColumnAggregationWithErrorsSumRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_ColumnAggregationWithErrorsSumRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ColumnAggregationWithErrorsSumRequest_descriptor, - new java.lang.String[] { "Family", "Qualifier", }); - internal_static_ColumnAggregationWithErrorsSumResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_ColumnAggregationWithErrorsSumResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ColumnAggregationWithErrorsSumResponse_descriptor, - new java.lang.String[] { "Sum", }); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java deleted file mode 100644 index b25f7aa67cc..00000000000 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/ColumnAggregationWithNullResponseProtos.java +++ /dev/null @@ -1,1283 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: ColumnAggregationNullResponseProtocol.proto - -package org.apache.hadoop.hbase.coprocessor.protobuf.generated; - -public final class ColumnAggregationWithNullResponseProtos { - private ColumnAggregationWithNullResponseProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface ColumnAggregationNullResponseSumRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family = 1; - /** - * required bytes family = 1; - */ - boolean hasFamily(); - /** - * required bytes family = 1; - */ - com.google.protobuf.ByteString getFamily(); - - // optional bytes qualifier = 2; - /** - * optional bytes qualifier = 2; - */ - boolean hasQualifier(); - /** - * optional bytes qualifier = 2; - */ - com.google.protobuf.ByteString getQualifier(); - } - /** - * Protobuf type {@code ColumnAggregationNullResponseSumRequest} - * - *
-   * use unique names for messages in ColumnAggregationXXX.protos due to a bug in
-   * protoc or hadoop's protoc compiler.
-   * 
- */ - public static final class ColumnAggregationNullResponseSumRequest extends - com.google.protobuf.GeneratedMessage - implements ColumnAggregationNullResponseSumRequestOrBuilder { - // Use ColumnAggregationNullResponseSumRequest.newBuilder() to construct. - private ColumnAggregationNullResponseSumRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ColumnAggregationNullResponseSumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ColumnAggregationNullResponseSumRequest defaultInstance; - public static ColumnAggregationNullResponseSumRequest getDefaultInstance() { - return defaultInstance; - } - - public ColumnAggregationNullResponseSumRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ColumnAggregationNullResponseSumRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - qualifier_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ColumnAggregationNullResponseSumRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ColumnAggregationNullResponseSumRequest(input, extensionRegistry); - } - 
}; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required bytes family = 1; - public static final int FAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString family_; - /** - * required bytes family = 1; - */ - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes family = 1; - */ - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // optional bytes qualifier = 2; - public static final int QUALIFIER_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString qualifier_; - /** - * optional bytes qualifier = 2; - */ - public boolean hasQualifier() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional bytes qualifier = 2; - */ - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - qualifier_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFamily()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, qualifier_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, qualifier_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest) obj; - - boolean result = true; - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && (hasQualifier() == other.hasQualifier()); - if (hasQualifier()) { - result = result && getQualifier() - .equals(other.getQualifier()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() 
{ - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (hasQualifier()) { - hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; - hash = (53 * hash) + getQualifier().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code ColumnAggregationNullResponseSumRequest} - * - *
-     * use unique names for messages in ColumnAggregationXXX.protos due to a bug in
-     * protoc or hadoop's protoc compiler.
-     * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - qualifier_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest(this); - int from_bitField0_ = 
bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.family_ = family_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.qualifier_ = qualifier_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.getDefaultInstance()) return this; - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (other.hasQualifier()) { - setQualifier(other.getQualifier()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFamily()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required bytes family = 1; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - /** - * required bytes family = 1; - */ - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes family = 1; - */ - public com.google.protobuf.ByteString getFamily() { - return family_; - } - /** - * required bytes family = 1; - */ - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - family_ = value; - onChanged(); - return this; - } - /** - * required bytes family = 1; - */ - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000001); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // optional bytes qualifier = 2; - private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; - /** - * optional bytes qualifier = 2; - */ - public boolean hasQualifier() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional bytes qualifier = 2; - */ - public com.google.protobuf.ByteString getQualifier() { - return qualifier_; - } - /** - * optional bytes qualifier = 2; - */ - public Builder 
setQualifier(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - qualifier_ = value; - onChanged(); - return this; - } - /** - * optional bytes qualifier = 2; - */ - public Builder clearQualifier() { - bitField0_ = (bitField0_ & ~0x00000002); - qualifier_ = getDefaultInstance().getQualifier(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ColumnAggregationNullResponseSumRequest) - } - - static { - defaultInstance = new ColumnAggregationNullResponseSumRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ColumnAggregationNullResponseSumRequest) - } - - public interface ColumnAggregationNullResponseSumResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional int64 sum = 1; - /** - * optional int64 sum = 1; - */ - boolean hasSum(); - /** - * optional int64 sum = 1; - */ - long getSum(); - } - /** - * Protobuf type {@code ColumnAggregationNullResponseSumResponse} - */ - public static final class ColumnAggregationNullResponseSumResponse extends - com.google.protobuf.GeneratedMessage - implements ColumnAggregationNullResponseSumResponseOrBuilder { - // Use ColumnAggregationNullResponseSumResponse.newBuilder() to construct. - private ColumnAggregationNullResponseSumResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ColumnAggregationNullResponseSumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ColumnAggregationNullResponseSumResponse defaultInstance; - public static ColumnAggregationNullResponseSumResponse getDefaultInstance() { - return defaultInstance; - } - - public ColumnAggregationNullResponseSumResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ColumnAggregationNullResponseSumResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - sum_ = input.readInt64(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public ColumnAggregationNullResponseSumResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ColumnAggregationNullResponseSumResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // optional int64 sum = 1; - public static final int SUM_FIELD_NUMBER = 1; - private long sum_; - /** - * optional int64 sum = 1; - */ - public boolean hasSum() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional int64 sum = 1; - */ - public long getSum() { - return sum_; - } - - private void initFields() { - sum_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt64(1, sum_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(1, sum_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse) obj; - - boolean result = true; - result = result && (hasSum() == other.hasSum()); - if (hasSum()) { - result = result && (getSum() - == other.getSum()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 
41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasSum()) { - hash = (37 * hash) + SUM_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getSum()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder 
newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code ColumnAggregationNullResponseSumResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - sum_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumResponse_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.sum_ = sum_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.getDefaultInstance()) return this; - if (other.hasSum()) { - setSum(other.getSum()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // optional int64 sum = 1; - private long sum_ ; - /** - * optional int64 sum = 1; - */ - public boolean hasSum() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional int64 sum = 1; - */ - public long getSum() { - return sum_; - } - /** - * optional int64 sum = 1; - */ - public Builder setSum(long value) { - bitField0_ |= 0x00000001; - sum_ = value; - onChanged(); - return this; - } - /** - * optional int64 sum = 1; - */ - public Builder clearSum() { - bitField0_ = (bitField0_ & ~0x00000001); - sum_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:ColumnAggregationNullResponseSumResponse) - } - - static { - defaultInstance = new ColumnAggregationNullResponseSumResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ColumnAggregationNullResponseSumResponse) - } - - /** - * Protobuf service {@code ColumnAggregationServiceNullResponse} - */ - public static abstract class ColumnAggregationServiceNullResponse - implements com.google.protobuf.Service { - 
protected ColumnAggregationServiceNullResponse() {} - - public interface Interface { - /** - * rpc sum(.ColumnAggregationNullResponseSumRequest) returns (.ColumnAggregationNullResponseSumResponse); - */ - public abstract void sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new ColumnAggregationServiceNullResponse() { - @java.lang.Override - public void sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest request, - com.google.protobuf.RpcCallback done) { - impl.sum(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.sum(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - /** - * rpc sum(.ColumnAggregationNullResponseSumRequest) returns (.ColumnAggregationNullResponseSumResponse); - */ - public abstract void sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest request, - 
com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.sum(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationServiceNullResponse implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse sum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.getDefaultInstance()); - } - - } - - // @@protoc_insertion_point(class_scope:ColumnAggregationServiceNullResponse) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ColumnAggregationNullResponseSumRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ColumnAggregationNullResponseSumRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_ColumnAggregationNullResponseSumResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_ColumnAggregationNullResponseSumResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n+ColumnAggregationNullResponseProtocol." 
+ - "proto\"L\n\'ColumnAggregationNullResponseSu" + - "mRequest\022\016\n\006family\030\001 \002(\014\022\021\n\tqualifier\030\002 " + - "\001(\014\"7\n(ColumnAggregationNullResponseSumR" + - "esponse\022\013\n\003sum\030\001 \001(\0032\202\001\n$ColumnAggregati" + - "onServiceNullResponse\022Z\n\003sum\022(.ColumnAgg" + - "regationNullResponseSumRequest\032).ColumnA" + - "ggregationNullResponseSumResponseBg\n6org" + - ".apache.hadoop.hbase.coprocessor.protobu" + - "f.generatedB\'ColumnAggregationWithNullRe", - "sponseProtos\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_ColumnAggregationNullResponseSumRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_ColumnAggregationNullResponseSumRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ColumnAggregationNullResponseSumRequest_descriptor, - new java.lang.String[] { "Family", "Qualifier", }); - internal_static_ColumnAggregationNullResponseSumResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_ColumnAggregationNullResponseSumResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_ColumnAggregationNullResponseSumResponse_descriptor, - new java.lang.String[] { "Sum", }); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/DummyRegionServerEndpointProtos.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/DummyRegionServerEndpointProtos.java deleted file mode 100644 index a011b30eebc..00000000000 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/DummyRegionServerEndpointProtos.java +++ /dev/null @@ -1,1225 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: DummyRegionServerEndpoint.proto - -package org.apache.hadoop.hbase.coprocessor.protobuf.generated; - -public final class DummyRegionServerEndpointProtos { - private DummyRegionServerEndpointProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface DummyRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - /** - * Protobuf type {@code hbase.test.pb.DummyRequest} - */ - public static final class DummyRequest extends - com.google.protobuf.GeneratedMessage - implements DummyRequestOrBuilder { - // Use DummyRequest.newBuilder() to construct. 
- private DummyRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DummyRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DummyRequest defaultInstance; - public static DummyRequest getDefaultInstance() { - return defaultInstance; - } - - public DummyRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private DummyRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DummyRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DummyRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - 
return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest parseFrom( - 
com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.test.pb.DummyRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyRequest_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.test.pb.DummyRequest) - } - - static { - defaultInstance = new DummyRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.test.pb.DummyRequest) - } - - public interface DummyResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string value = 1; - /** - * required string value = 1; - */ - boolean hasValue(); - /** - * required string value = 1; - */ - java.lang.String getValue(); - /** - * required string value = 1; - */ - com.google.protobuf.ByteString - getValueBytes(); - } - /** - * Protobuf type {@code hbase.test.pb.DummyResponse} - */ - public static final class DummyResponse extends - com.google.protobuf.GeneratedMessage - implements DummyResponseOrBuilder { - // Use DummyResponse.newBuilder() to construct. 
- private DummyResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DummyResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DummyResponse defaultInstance; - public static DummyResponse getDefaultInstance() { - return defaultInstance; - } - - public DummyResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private DummyResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - value_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DummyResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DummyResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required string value = 1; - public static final int VALUE_FIELD_NUMBER = 1; - private java.lang.Object value_; - /** - * required string value = 1; - */ - public boolean hasValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required string value = 1; - */ - public java.lang.String getValue() { - java.lang.Object ref = value_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - 
java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - value_ = s; - } - return s; - } - } - /** - * required string value = 1; - */ - public com.google.protobuf.ByteString - getValueBytes() { - java.lang.Object ref = value_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - value_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - value_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasValue()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getValueBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getValueBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse) obj; - - boolean result = true; - result = result && (hasValue() == other.hasValue()); - if (hasValue()) { - result = result && getValue() - .equals(other.getValue()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.test.pb.DummyResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - value_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.value_ = value_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.getDefaultInstance()) return this; - if (other.hasValue()) { - bitField0_ |= 0x00000001; - value_ = other.value_; - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasValue()) { - - return false; - } 
- return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required string value = 1; - private java.lang.Object value_ = ""; - /** - * required string value = 1; - */ - public boolean hasValue() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required string value = 1; - */ - public java.lang.String getValue() { - java.lang.Object ref = value_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - value_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * required string value = 1; - */ - public com.google.protobuf.ByteString - getValueBytes() { - java.lang.Object ref = value_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - value_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * required string value = 1; - */ - public Builder setValue( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - value_ = value; - onChanged(); - return this; - } - /** - * required string value = 1; - */ - public Builder clearValue() { - bitField0_ = (bitField0_ & ~0x00000001); - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - /** - * required string value = 1; - */ - public Builder setValueBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - value_ = value; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.test.pb.DummyResponse) - } - - static { - defaultInstance = new DummyResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.test.pb.DummyResponse) - } - - /** - * Protobuf service {@code hbase.test.pb.DummyService} - */ - public static abstract class DummyService - implements com.google.protobuf.Service { - protected DummyService() {} - - public interface Interface { - /** - * rpc dummyCall(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse); - */ - public abstract void dummyCall( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc dummyThrow(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse); - */ - public abstract void dummyThrow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface 
impl) { - return new DummyService() { - @java.lang.Override - public void dummyCall( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request, - com.google.protobuf.RpcCallback done) { - impl.dummyCall(controller, request, done); - } - - @java.lang.Override - public void dummyThrow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request, - com.google.protobuf.RpcCallback done) { - impl.dummyThrow(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.dummyCall(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest)request); - case 1: - return impl.dummyThrow(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - /** - * rpc dummyCall(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse); - */ - public abstract void dummyCall( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request, - 
com.google.protobuf.RpcCallback done); - - /** - * rpc dummyThrow(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse); - */ - public abstract void dummyThrow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.dummyCall(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.dummyThrow(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - 
public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void dummyCall( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.getDefaultInstance())); - } - - public void dummyThrow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.class, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse dummyCall( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse dummyThrow( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse dummyCall( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse dummyThrow( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.getDefaultInstance()); - } - - } - - // @@protoc_insertion_point(class_scope:hbase.test.pb.DummyService) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_test_pb_DummyRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_test_pb_DummyRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_test_pb_DummyResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_test_pb_DummyResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\037DummyRegionServerEndpoint.proto\022\rhbase" + - ".test.pb\"\016\n\014DummyRequest\"\036\n\rDummyRespons" + - "e\022\r\n\005value\030\001 \002(\t2\237\001\n\014DummyService\022F\n\tdum" + - "myCall\022\033.hbase.test.pb.DummyRequest\032\034.hb" + - "ase.test.pb.DummyResponse\022G\n\ndummyThrow\022" + - "\033.hbase.test.pb.DummyRequest\032\034.hbase.tes" + - "t.pb.DummyResponseB_\n6org.apache.hadoop." + - "hbase.coprocessor.protobuf.generatedB\037Du" + - "mmyRegionServerEndpointProtos\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_test_pb_DummyRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_test_pb_DummyRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_test_pb_DummyRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_test_pb_DummyResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_test_pb_DummyResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_test_pb_DummyResponse_descriptor, - new java.lang.String[] { "Value", }); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/IncrementCounterProcessorTestProtos.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/IncrementCounterProcessorTestProtos.java deleted file mode 100644 index 7ba5b8edbd6..00000000000 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/protobuf/generated/IncrementCounterProcessorTestProtos.java +++ /dev/null @@ -1,4059 
+0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: IncrementCounterProcessor.proto - -package org.apache.hadoop.hbase.coprocessor.protobuf.generated; - -public final class IncrementCounterProcessorTestProtos { - private IncrementCounterProcessorTestProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface IncCounterProcessorRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes row = 1; - /** - * required bytes row = 1; - */ - boolean hasRow(); - /** - * required bytes row = 1; - */ - com.google.protobuf.ByteString getRow(); - - // required int32 counter = 2; - /** - * required int32 counter = 2; - */ - boolean hasCounter(); - /** - * required int32 counter = 2; - */ - int getCounter(); - } - /** - * Protobuf type {@code IncCounterProcessorRequest} - */ - public static final class IncCounterProcessorRequest extends - com.google.protobuf.GeneratedMessage - implements IncCounterProcessorRequestOrBuilder { - // Use IncCounterProcessorRequest.newBuilder() to construct. - private IncCounterProcessorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private IncCounterProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IncCounterProcessorRequest defaultInstance; - public static IncCounterProcessorRequest getDefaultInstance() { - return defaultInstance; - } - - public IncCounterProcessorRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private IncCounterProcessorRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - counter_ = input.readInt32(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_fieldAccessorTable - 
.ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IncCounterProcessorRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IncCounterProcessorRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required bytes row = 1; - public static final int ROW_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString row_; - /** - * required bytes row = 1; - */ - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes row = 1; - */ - public com.google.protobuf.ByteString getRow() { - return row_; - } - - // required int32 counter = 2; - public static final int COUNTER_FIELD_NUMBER = 2; - private int counter_; - /** - * required int32 counter = 2; - */ - public boolean hasCounter() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required int32 counter = 2; - */ - public int getCounter() { - return counter_; - } - - private void initFields() { - row_ = com.google.protobuf.ByteString.EMPTY; - counter_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRow()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasCounter()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeInt32(2, counter_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, row_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(2, counter_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest other = 
(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest) obj; - - boolean result = true; - result = result && (hasRow() == other.hasRow()); - if (hasRow()) { - result = result && getRow() - .equals(other.getRow()); - } - result = result && (hasCounter() == other.hasCounter()); - if (hasCounter()) { - result = result && (getCounter() - == other.getCounter()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRow()) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRow().hashCode(); - } - if (hasCounter()) { - hash = (37 * hash) + COUNTER_FIELD_NUMBER; - hash = (53 * hash) + getCounter(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code IncCounterProcessorRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - row_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - counter_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest getDefaultInstanceForType() { - return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.row_ = row_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.counter_ = counter_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.getDefaultInstance()) return this; - if (other.hasRow()) { - setRow(other.getRow()); - } - if (other.hasCounter()) { - setCounter(other.getCounter()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRow()) { - - return false; - } - if (!hasCounter()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required bytes row = 1; - private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; - /** - * required bytes row = 1; - */ - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes row = 1; - */ - public com.google.protobuf.ByteString getRow() { - return row_; - } - /** - * required bytes row = 1; - */ - public Builder 
setRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - row_ = value; - onChanged(); - return this; - } - /** - * required bytes row = 1; - */ - public Builder clearRow() { - bitField0_ = (bitField0_ & ~0x00000001); - row_ = getDefaultInstance().getRow(); - onChanged(); - return this; - } - - // required int32 counter = 2; - private int counter_ ; - /** - * required int32 counter = 2; - */ - public boolean hasCounter() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required int32 counter = 2; - */ - public int getCounter() { - return counter_; - } - /** - * required int32 counter = 2; - */ - public Builder setCounter(int value) { - bitField0_ |= 0x00000002; - counter_ = value; - onChanged(); - return this; - } - /** - * required int32 counter = 2; - */ - public Builder clearCounter() { - bitField0_ = (bitField0_ & ~0x00000002); - counter_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:IncCounterProcessorRequest) - } - - static { - defaultInstance = new IncCounterProcessorRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:IncCounterProcessorRequest) - } - - public interface IncCounterProcessorResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required int32 response = 1; - /** - * required int32 response = 1; - */ - boolean hasResponse(); - /** - * required int32 response = 1; - */ - int getResponse(); - } - /** - * Protobuf type {@code IncCounterProcessorResponse} - */ - public static final class IncCounterProcessorResponse extends - com.google.protobuf.GeneratedMessage - implements IncCounterProcessorResponseOrBuilder { - // Use IncCounterProcessorResponse.newBuilder() to construct. 
- private IncCounterProcessorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private IncCounterProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final IncCounterProcessorResponse defaultInstance; - public static IncCounterProcessorResponse getDefaultInstance() { - return defaultInstance; - } - - public IncCounterProcessorResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private IncCounterProcessorResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - response_ = input.readInt32(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public IncCounterProcessorResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new IncCounterProcessorResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required int32 response = 1; - public static final int RESPONSE_FIELD_NUMBER = 1; - private int response_; - /** - * required int32 response = 1; - */ - public boolean hasResponse() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required int32 response = 1; - */ - public int getResponse() { - return response_; - } - - private void initFields() { 
- response_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasResponse()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt32(1, response_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(1, response_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse) obj; - - boolean result = true; - result = result && (hasResponse() == other.hasResponse()); - if (hasResponse()) { - result = result && (getResponse() - == other.getResponse()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasResponse()) { - hash = (37 * hash) + RESPONSE_FIELD_NUMBER; - hash = (53 * hash) + getResponse(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code IncCounterProcessorResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.class, 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - response_ = 0; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.response_ = response_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.getDefaultInstance()) return this; - if (other.hasResponse()) { - setResponse(other.getResponse()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasResponse()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - 
com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required int32 response = 1; - private int response_ ; - /** - * required int32 response = 1; - */ - public boolean hasResponse() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required int32 response = 1; - */ - public int getResponse() { - return response_; - } - /** - * required int32 response = 1; - */ - public Builder setResponse(int value) { - bitField0_ |= 0x00000001; - response_ = value; - onChanged(); - return this; - } - /** - * required int32 response = 1; - */ - public Builder clearResponse() { - bitField0_ = (bitField0_ & ~0x00000001); - response_ = 0; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:IncCounterProcessorResponse) - } - - static { - defaultInstance = new IncCounterProcessorResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:IncCounterProcessorResponse) - } - - public interface FriendsOfFriendsProcessorRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes person = 1; - /** - * required bytes person = 1; - */ - boolean hasPerson(); - /** - * required bytes person = 1; - */ - com.google.protobuf.ByteString getPerson(); - - // required bytes row = 2; - /** - * required bytes row = 2; - */ - boolean hasRow(); - /** - * required bytes row = 2; - */ - com.google.protobuf.ByteString getRow(); - - // repeated string result = 3; - /** - * repeated string result = 3; - */ - java.util.List - getResultList(); - /** - * repeated string result = 3; - */ - int getResultCount(); - /** - * repeated string result = 3; - */ - java.lang.String getResult(int index); - /** - * repeated string result = 3; - */ - com.google.protobuf.ByteString - getResultBytes(int index); - } - /** - * Protobuf type {@code FriendsOfFriendsProcessorRequest} - */ - public static final class FriendsOfFriendsProcessorRequest extends - com.google.protobuf.GeneratedMessage - implements FriendsOfFriendsProcessorRequestOrBuilder { - // Use FriendsOfFriendsProcessorRequest.newBuilder() to construct. 
- private FriendsOfFriendsProcessorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FriendsOfFriendsProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FriendsOfFriendsProcessorRequest defaultInstance; - public static FriendsOfFriendsProcessorRequest getDefaultInstance() { - return defaultInstance; - } - - public FriendsOfFriendsProcessorRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private FriendsOfFriendsProcessorRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - person_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - row_ = input.readBytes(); - break; - } - case 26: { - if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - result_ = new com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000004; - } - result_.add(input.readBytes()); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - result_ = new com.google.protobuf.UnmodifiableLazyStringList(result_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FriendsOfFriendsProcessorRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FriendsOfFriendsProcessorRequest(input, extensionRegistry); - } - }; - - 
@java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required bytes person = 1; - public static final int PERSON_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString person_; - /** - * required bytes person = 1; - */ - public boolean hasPerson() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes person = 1; - */ - public com.google.protobuf.ByteString getPerson() { - return person_; - } - - // required bytes row = 2; - public static final int ROW_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString row_; - /** - * required bytes row = 2; - */ - public boolean hasRow() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required bytes row = 2; - */ - public com.google.protobuf.ByteString getRow() { - return row_; - } - - // repeated string result = 3; - public static final int RESULT_FIELD_NUMBER = 3; - private com.google.protobuf.LazyStringList result_; - /** - * repeated string result = 3; - */ - public java.util.List - getResultList() { - return result_; - } - /** - * repeated string result = 3; - */ - public int getResultCount() { - return result_.size(); - } - /** - * repeated string result = 3; - */ - public java.lang.String getResult(int index) { - return result_.get(index); - } - /** - * repeated string result = 3; - */ - public com.google.protobuf.ByteString - getResultBytes(int index) { - return result_.getByteString(index); - } - - private void initFields() { - person_ = com.google.protobuf.ByteString.EMPTY; - row_ = com.google.protobuf.ByteString.EMPTY; - result_ = com.google.protobuf.LazyStringArrayList.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasPerson()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasRow()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, person_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, row_); - } - for (int i = 0; i < result_.size(); i++) { - output.writeBytes(3, result_.getByteString(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, person_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, row_); - } - { - int dataSize = 0; - for (int i = 0; i < result_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(result_.getByteString(i)); - } - size += dataSize; - size += 1 * getResultList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if 
(obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest) obj; - - boolean result = true; - result = result && (hasPerson() == other.hasPerson()); - if (hasPerson()) { - result = result && getPerson() - .equals(other.getPerson()); - } - result = result && (hasRow() == other.hasRow()); - if (hasRow()) { - result = result && getRow() - .equals(other.getRow()); - } - result = result && getResultList() - .equals(other.getResultList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasPerson()) { - hash = (37 * hash) + PERSON_FIELD_NUMBER; - hash = (53 * hash) + getPerson().hashCode(); - } - if (hasRow()) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRow().hashCode(); - } - if (getResultCount() > 0) { - hash = (37 * hash) + RESULT_FIELD_NUMBER; - hash = (53 * hash) + getResultList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code FriendsOfFriendsProcessorRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - 
super.clear(); - person_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - row_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - result_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.person_ = person_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.row_ = row_; - if (((bitField0_ & 0x00000004) == 0x00000004)) { - result_ = new com.google.protobuf.UnmodifiableLazyStringList( - result_); - bitField0_ = (bitField0_ & ~0x00000004); - } - result.result_ = result_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.getDefaultInstance()) return this; - if (other.hasPerson()) { - setPerson(other.getPerson()); - } - if (other.hasRow()) { - setRow(other.getRow()); - } - if (!other.result_.isEmpty()) { - if (result_.isEmpty()) { - result_ = other.result_; - bitField0_ = (bitField0_ & ~0x00000004); - } else { - ensureResultIsMutable(); - result_.addAll(other.result_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final 
boolean isInitialized() { - if (!hasPerson()) { - - return false; - } - if (!hasRow()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required bytes person = 1; - private com.google.protobuf.ByteString person_ = com.google.protobuf.ByteString.EMPTY; - /** - * required bytes person = 1; - */ - public boolean hasPerson() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes person = 1; - */ - public com.google.protobuf.ByteString getPerson() { - return person_; - } - /** - * required bytes person = 1; - */ - public Builder setPerson(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - person_ = value; - onChanged(); - return this; - } - /** - * required bytes person = 1; - */ - public Builder clearPerson() { - bitField0_ = (bitField0_ & ~0x00000001); - person_ = getDefaultInstance().getPerson(); - onChanged(); - return this; - } - - // required bytes row = 2; - private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; - /** - * required bytes row = 2; - */ - public boolean hasRow() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required bytes row = 2; - */ - public com.google.protobuf.ByteString getRow() { - return row_; - } - /** - * required bytes row = 2; - */ - public Builder setRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - row_ = value; - onChanged(); - return this; - } - /** - * required bytes row = 2; - */ - public Builder clearRow() { - bitField0_ = (bitField0_ & ~0x00000002); - row_ = getDefaultInstance().getRow(); - onChanged(); - return this; - } - - // repeated string result = 3; - private com.google.protobuf.LazyStringList result_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureResultIsMutable() { - if (!((bitField0_ & 0x00000004) == 0x00000004)) { - result_ = new com.google.protobuf.LazyStringArrayList(result_); - bitField0_ |= 0x00000004; - } - } - /** - * repeated string result = 3; - */ - public java.util.List - getResultList() { - return java.util.Collections.unmodifiableList(result_); - } - /** - * repeated string result = 3; - */ - public int getResultCount() { - return result_.size(); - } - /** - * repeated string result = 3; - */ - public java.lang.String getResult(int index) { - return result_.get(index); - } - /** - * repeated string result = 3; - */ - public com.google.protobuf.ByteString - getResultBytes(int index) { - return result_.getByteString(index); - } - /** - * repeated string result = 3; - */ - public Builder setResult( - int index, java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - 
ensureResultIsMutable(); - result_.set(index, value); - onChanged(); - return this; - } - /** - * repeated string result = 3; - */ - public Builder addResult( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.add(value); - onChanged(); - return this; - } - /** - * repeated string result = 3; - */ - public Builder addAllResult( - java.lang.Iterable values) { - ensureResultIsMutable(); - super.addAll(values, result_); - onChanged(); - return this; - } - /** - * repeated string result = 3; - */ - public Builder clearResult() { - result_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - onChanged(); - return this; - } - /** - * repeated string result = 3; - */ - public Builder addResultBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.add(value); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:FriendsOfFriendsProcessorRequest) - } - - static { - defaultInstance = new FriendsOfFriendsProcessorRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:FriendsOfFriendsProcessorRequest) - } - - public interface FriendsOfFriendsProcessorResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated string result = 1; - /** - * repeated string result = 1; - */ - java.util.List - getResultList(); - /** - * repeated string result = 1; - */ - int getResultCount(); - /** - * repeated string result = 1; - */ - java.lang.String getResult(int index); - /** - * repeated string result = 1; - */ - com.google.protobuf.ByteString - getResultBytes(int index); - } - /** - * Protobuf type {@code FriendsOfFriendsProcessorResponse} - */ - public static final class FriendsOfFriendsProcessorResponse extends - com.google.protobuf.GeneratedMessage - implements FriendsOfFriendsProcessorResponseOrBuilder { - // Use FriendsOfFriendsProcessorResponse.newBuilder() to construct. 
- private FriendsOfFriendsProcessorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private FriendsOfFriendsProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final FriendsOfFriendsProcessorResponse defaultInstance; - public static FriendsOfFriendsProcessorResponse getDefaultInstance() { - return defaultInstance; - } - - public FriendsOfFriendsProcessorResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private FriendsOfFriendsProcessorResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - result_ = new com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000001; - } - result_.add(input.readBytes()); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - result_ = new com.google.protobuf.UnmodifiableLazyStringList(result_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public FriendsOfFriendsProcessorResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FriendsOfFriendsProcessorResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated string result = 1; - public static final int 
RESULT_FIELD_NUMBER = 1; - private com.google.protobuf.LazyStringList result_; - /** - * repeated string result = 1; - */ - public java.util.List - getResultList() { - return result_; - } - /** - * repeated string result = 1; - */ - public int getResultCount() { - return result_.size(); - } - /** - * repeated string result = 1; - */ - public java.lang.String getResult(int index) { - return result_.get(index); - } - /** - * repeated string result = 1; - */ - public com.google.protobuf.ByteString - getResultBytes(int index) { - return result_.getByteString(index); - } - - private void initFields() { - result_ = com.google.protobuf.LazyStringArrayList.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < result_.size(); i++) { - output.writeBytes(1, result_.getByteString(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < result_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(result_.getByteString(i)); - } - size += dataSize; - size += 1 * getResultList().size(); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse) obj; - - boolean result = true; - result = result && getResultList() - .equals(other.getResultList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getResultCount() > 0) { - hash = (37 * hash) + RESULT_FIELD_NUMBER; - hash = (53 * hash) + getResultList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( - 
com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code FriendsOfFriendsProcessorResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponseOrBuilder { - 
public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - result_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - result_ = new com.google.protobuf.UnmodifiableLazyStringList( - result_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.result_ = result_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse) { - return 
mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.getDefaultInstance()) return this; - if (!other.result_.isEmpty()) { - if (result_.isEmpty()) { - result_ = other.result_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureResultIsMutable(); - result_.addAll(other.result_); - } - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // repeated string result = 1; - private com.google.protobuf.LazyStringList result_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureResultIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - result_ = new com.google.protobuf.LazyStringArrayList(result_); - bitField0_ |= 0x00000001; - } - } - /** - * repeated string result = 1; - */ - public java.util.List - getResultList() { - return java.util.Collections.unmodifiableList(result_); - } - /** - * repeated string result = 1; - */ - public int getResultCount() { - return result_.size(); - } - /** - * repeated string result = 1; - */ - public java.lang.String getResult(int index) { - return result_.get(index); - } - /** - * repeated string result = 1; - */ - public com.google.protobuf.ByteString - getResultBytes(int index) { - return result_.getByteString(index); - } - /** - * repeated string result = 1; - */ - public Builder setResult( - int index, java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.set(index, value); - onChanged(); - return this; - } - /** - * repeated string result = 1; - */ - public Builder addResult( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.add(value); - onChanged(); - return this; - } - /** - * repeated string result = 1; - */ - public Builder addAllResult( - java.lang.Iterable values) { - ensureResultIsMutable(); - super.addAll(values, result_); - onChanged(); - return this; - } - /** - * repeated string result = 1; - */ - public Builder clearResult() { - result_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - /** - * repeated string result = 1; - */ - public Builder addResultBytes( - 
com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureResultIsMutable(); - result_.add(value); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:FriendsOfFriendsProcessorResponse) - } - - static { - defaultInstance = new FriendsOfFriendsProcessorResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:FriendsOfFriendsProcessorResponse) - } - - public interface RowSwapProcessorRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes row1 = 1; - /** - * required bytes row1 = 1; - */ - boolean hasRow1(); - /** - * required bytes row1 = 1; - */ - com.google.protobuf.ByteString getRow1(); - - // required bytes row2 = 2; - /** - * required bytes row2 = 2; - */ - boolean hasRow2(); - /** - * required bytes row2 = 2; - */ - com.google.protobuf.ByteString getRow2(); - } - /** - * Protobuf type {@code RowSwapProcessorRequest} - */ - public static final class RowSwapProcessorRequest extends - com.google.protobuf.GeneratedMessage - implements RowSwapProcessorRequestOrBuilder { - // Use RowSwapProcessorRequest.newBuilder() to construct. - private RowSwapProcessorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RowSwapProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RowSwapProcessorRequest defaultInstance; - public static RowSwapProcessorRequest getDefaultInstance() { - return defaultInstance; - } - - public RowSwapProcessorRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private RowSwapProcessorRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row1_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - row2_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RowSwapProcessorRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RowSwapProcessorRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required bytes row1 = 1; - public static final int ROW1_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString row1_; - /** - * required bytes row1 = 1; - */ - public boolean hasRow1() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes row1 = 1; - */ - public com.google.protobuf.ByteString getRow1() { - return row1_; - } - - // required bytes row2 = 2; - public static final int ROW2_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString row2_; - /** - * required bytes row2 = 2; - */ - public boolean hasRow2() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required bytes row2 = 2; - */ - public com.google.protobuf.ByteString getRow2() { - return row2_; - } - - private void initFields() { - row1_ = com.google.protobuf.ByteString.EMPTY; - row2_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRow1()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasRow2()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, row1_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, row2_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, row1_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, row2_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest)) { - return 
super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest) obj; - - boolean result = true; - result = result && (hasRow1() == other.hasRow1()); - if (hasRow1()) { - result = result && getRow1() - .equals(other.getRow1()); - } - result = result && (hasRow2() == other.hasRow2()); - if (hasRow2()) { - result = result && getRow2() - .equals(other.getRow2()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRow1()) { - hash = (37 * hash) + ROW1_FIELD_NUMBER; - hash = (53 * hash) + getRow1().hashCode(); - } - if (hasRow2()) { - hash = (37 * hash) + ROW2_FIELD_NUMBER; - hash = (53 * hash) + getRow2().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return 
PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code RowSwapProcessorRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - row1_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - row2_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_descriptor; - } - - public 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.row1_ = row1_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.row2_ = row2_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.getDefaultInstance()) return this; - if (other.hasRow1()) { - setRow1(other.getRow1()); - } - if (other.hasRow2()) { - setRow2(other.getRow2()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRow1()) { - - return false; - } - if (!hasRow2()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required bytes row1 = 1; - private com.google.protobuf.ByteString row1_ = com.google.protobuf.ByteString.EMPTY; - /** - * required bytes row1 = 1; - */ - public boolean hasRow1() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes row1 = 1; - */ - public 
com.google.protobuf.ByteString getRow1() { - return row1_; - } - /** - * required bytes row1 = 1; - */ - public Builder setRow1(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - row1_ = value; - onChanged(); - return this; - } - /** - * required bytes row1 = 1; - */ - public Builder clearRow1() { - bitField0_ = (bitField0_ & ~0x00000001); - row1_ = getDefaultInstance().getRow1(); - onChanged(); - return this; - } - - // required bytes row2 = 2; - private com.google.protobuf.ByteString row2_ = com.google.protobuf.ByteString.EMPTY; - /** - * required bytes row2 = 2; - */ - public boolean hasRow2() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required bytes row2 = 2; - */ - public com.google.protobuf.ByteString getRow2() { - return row2_; - } - /** - * required bytes row2 = 2; - */ - public Builder setRow2(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - row2_ = value; - onChanged(); - return this; - } - /** - * required bytes row2 = 2; - */ - public Builder clearRow2() { - bitField0_ = (bitField0_ & ~0x00000002); - row2_ = getDefaultInstance().getRow2(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:RowSwapProcessorRequest) - } - - static { - defaultInstance = new RowSwapProcessorRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RowSwapProcessorRequest) - } - - public interface RowSwapProcessorResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - /** - * Protobuf type {@code RowSwapProcessorResponse} - */ - public static final class RowSwapProcessorResponse extends - com.google.protobuf.GeneratedMessage - implements RowSwapProcessorResponseOrBuilder { - // Use RowSwapProcessorResponse.newBuilder() to construct. 
- private RowSwapProcessorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RowSwapProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RowSwapProcessorResponse defaultInstance; - public static RowSwapProcessorResponse getDefaultInstance() { - return defaultInstance; - } - - public RowSwapProcessorResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private RowSwapProcessorResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public RowSwapProcessorResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RowSwapProcessorResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - 
if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseDelimitedFrom( - 
java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code RowSwapProcessorResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse 
getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - // @@protoc_insertion_point(builder_scope:RowSwapProcessorResponse) - } - - static { - defaultInstance = new RowSwapProcessorResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RowSwapProcessorResponse) - } - - public interface TimeoutProcessorRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes row = 1; - /** - * required bytes row = 1; - */ - boolean hasRow(); - /** - * required bytes row = 1; - */ - com.google.protobuf.ByteString getRow(); - } - /** - * Protobuf type {@code TimeoutProcessorRequest} - */ - public static final class TimeoutProcessorRequest extends - com.google.protobuf.GeneratedMessage - implements TimeoutProcessorRequestOrBuilder { - // Use TimeoutProcessorRequest.newBuilder() to construct. 
- private TimeoutProcessorRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private TimeoutProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TimeoutProcessorRequest defaultInstance; - public static TimeoutProcessorRequest getDefaultInstance() { - return defaultInstance; - } - - public TimeoutProcessorRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private TimeoutProcessorRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - row_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TimeoutProcessorRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TimeoutProcessorRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required bytes row = 1; - public static final int ROW_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString row_; - /** - * required bytes row = 1; - */ - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes row = 1; - */ - public com.google.protobuf.ByteString getRow() { - return row_; - } - - private void initFields() { - row_ = 
com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRow()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, row_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, row_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest) obj; - - boolean result = true; - result = result && (hasRow() == other.hasRow()); - if (hasRow()) { - result = result && getRow() - .equals(other.getRow()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRow()) { - hash = (37 * hash) + ROW_FIELD_NUMBER; - hash = (53 * hash) + getRow().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code TimeoutProcessorRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.Builder.class); - } - - // 
Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - row_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.row_ = row_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.getDefaultInstance()) return this; - if (other.hasRow()) { - setRow(other.getRow()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRow()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - 
org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required bytes row = 1; - private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; - /** - * required bytes row = 1; - */ - public boolean hasRow() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes row = 1; - */ - public com.google.protobuf.ByteString getRow() { - return row_; - } - /** - * required bytes row = 1; - */ - public Builder setRow(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - row_ = value; - onChanged(); - return this; - } - /** - * required bytes row = 1; - */ - public Builder clearRow() { - bitField0_ = (bitField0_ & ~0x00000001); - row_ = getDefaultInstance().getRow(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:TimeoutProcessorRequest) - } - - static { - defaultInstance = new TimeoutProcessorRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:TimeoutProcessorRequest) - } - - public interface TimeoutProcessorResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - /** - * Protobuf type {@code TimeoutProcessorResponse} - */ - public static final class TimeoutProcessorResponse extends - com.google.protobuf.GeneratedMessage - implements TimeoutProcessorResponseOrBuilder { - // Use TimeoutProcessorResponse.newBuilder() to construct. 
- private TimeoutProcessorResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private TimeoutProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final TimeoutProcessorResponse defaultInstance; - public static TimeoutProcessorResponse getDefaultInstance() { - return defaultInstance; - } - - public TimeoutProcessorResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private TimeoutProcessorResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public TimeoutProcessorResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new TimeoutProcessorResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - 
if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseDelimitedFrom( - 
java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code TimeoutProcessorResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse 
getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse build() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse buildPartial() { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse other) { - if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - // @@protoc_insertion_point(builder_scope:TimeoutProcessorResponse) - } - - static { - defaultInstance = new TimeoutProcessorResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:TimeoutProcessorResponse) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_IncCounterProcessorRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_IncCounterProcessorRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_IncCounterProcessorResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_IncCounterProcessorResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_FriendsOfFriendsProcessorRequest_descriptor; - private static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_FriendsOfFriendsProcessorResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RowSwapProcessorRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RowSwapProcessorRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_RowSwapProcessorResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RowSwapProcessorResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_TimeoutProcessorRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_TimeoutProcessorRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_TimeoutProcessorResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_TimeoutProcessorResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\037IncrementCounterProcessor.proto\":\n\032Inc" + - "CounterProcessorRequest\022\013\n\003row\030\001 \002(\014\022\017\n\007" + - "counter\030\002 \002(\005\"/\n\033IncCounterProcessorResp" + - "onse\022\020\n\010response\030\001 \002(\005\"O\n FriendsOfFrien" + - "dsProcessorRequest\022\016\n\006person\030\001 \002(\014\022\013\n\003ro" + - "w\030\002 \002(\014\022\016\n\006result\030\003 \003(\t\"3\n!FriendsOfFrie" + - "ndsProcessorResponse\022\016\n\006result\030\001 \003(\t\"5\n\027" + - "RowSwapProcessorRequest\022\014\n\004row1\030\001 \002(\014\022\014\n" + - "\004row2\030\002 \002(\014\"\032\n\030RowSwapProcessorResponse\"" + - "&\n\027TimeoutProcessorRequest\022\013\n\003row\030\001 \002(\014\"", - "\032\n\030TimeoutProcessorResponseB`\n6org.apach" + - "e.hadoop.hbase.coprocessor.protobuf.gene" + - "ratedB#IncrementCounterProcessorTestProt" + - "os\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_IncCounterProcessorRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_IncCounterProcessorRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_IncCounterProcessorRequest_descriptor, - new java.lang.String[] { "Row", "Counter", }); - internal_static_IncCounterProcessorResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_IncCounterProcessorResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_IncCounterProcessorResponse_descriptor, - new java.lang.String[] { "Response", }); - 
internal_static_FriendsOfFriendsProcessorRequest_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_FriendsOfFriendsProcessorRequest_descriptor, - new java.lang.String[] { "Person", "Row", "Result", }); - internal_static_FriendsOfFriendsProcessorResponse_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_FriendsOfFriendsProcessorResponse_descriptor, - new java.lang.String[] { "Result", }); - internal_static_RowSwapProcessorRequest_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_RowSwapProcessorRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RowSwapProcessorRequest_descriptor, - new java.lang.String[] { "Row1", "Row2", }); - internal_static_RowSwapProcessorResponse_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_RowSwapProcessorResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RowSwapProcessorResponse_descriptor, - new java.lang.String[] { }); - internal_static_TimeoutProcessorRequest_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_TimeoutProcessorRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_TimeoutProcessorRequest_descriptor, - new java.lang.String[] { "Row", }); - internal_static_TimeoutProcessorResponse_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_TimeoutProcessorResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_TimeoutProcessorResponse_descriptor, - new java.lang.String[] { }); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java deleted file mode 100644 index 508790c4e17..00000000000 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AggregateProtos.java +++ /dev/null @@ -1,2375 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: Aggregate.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class AggregateProtos { - private AggregateProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface AggregateRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required string interpreter_class_name = 1; - /** - * required string interpreter_class_name = 1; - * - *
-     ** The request passed to the AggregateService consists of three parts
-     *  (1) the (canonical) classname of the ColumnInterpreter implementation
-     *  (2) the Scan query
-     *  (3) any bytes required to construct the ColumnInterpreter object
-     *      properly
-     * 
- */ - boolean hasInterpreterClassName(); - /** - * required string interpreter_class_name = 1; - * - *
-     ** The request passed to the AggregateService consists of three parts
-     *  (1) the (canonical) classname of the ColumnInterpreter implementation
-     *  (2) the Scan query
-     *  (3) any bytes required to construct the ColumnInterpreter object
-     *      properly
-     * 
- */ - java.lang.String getInterpreterClassName(); - /** - * required string interpreter_class_name = 1; - * - *
-     ** The request passed to the AggregateService consists of three parts
-     *  (1) the (canonical) classname of the ColumnInterpreter implementation
-     *  (2) the Scan query
-     *  (3) any bytes required to construct the ColumnInterpreter object
-     *      properly
-     * 
- */ - com.google.protobuf.ByteString - getInterpreterClassNameBytes(); - - // required .hbase.pb.Scan scan = 2; - /** - * required .hbase.pb.Scan scan = 2; - */ - boolean hasScan(); - /** - * required .hbase.pb.Scan scan = 2; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); - /** - * required .hbase.pb.Scan scan = 2; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); - - // optional bytes interpreter_specific_bytes = 3; - /** - * optional bytes interpreter_specific_bytes = 3; - */ - boolean hasInterpreterSpecificBytes(); - /** - * optional bytes interpreter_specific_bytes = 3; - */ - com.google.protobuf.ByteString getInterpreterSpecificBytes(); - } - /** - * Protobuf type {@code hbase.pb.AggregateRequest} - */ - public static final class AggregateRequest extends - com.google.protobuf.GeneratedMessage - implements AggregateRequestOrBuilder { - // Use AggregateRequest.newBuilder() to construct. - private AggregateRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private AggregateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final AggregateRequest defaultInstance; - public static AggregateRequest getDefaultInstance() { - return defaultInstance; - } - - public AggregateRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private AggregateRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - interpreterClassName_ = input.readBytes(); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null; - if (((bitField0_ & 0x00000002) == 0x00000002)) { - subBuilder = scan_.toBuilder(); - } - scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(scan_); - scan_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000002; - break; - } - case 26: { - bitField0_ |= 0x00000004; - interpreterSpecificBytes_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public AggregateRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new AggregateRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required string interpreter_class_name = 1; - public static final int INTERPRETER_CLASS_NAME_FIELD_NUMBER = 1; - private java.lang.Object interpreterClassName_; - /** - * required string interpreter_class_name = 1; - * - *
-     ** The request passed to the AggregateService consists of three parts
-     *  (1) the (canonical) classname of the ColumnInterpreter implementation
-     *  (2) the Scan query
-     *  (3) any bytes required to construct the ColumnInterpreter object
-     *      properly
-     * 
- */ - public boolean hasInterpreterClassName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required string interpreter_class_name = 1; - * - *
-     ** The request passed to the AggregateService consists of three parts
-     *  (1) the (canonical) classname of the ColumnInterpreter implementation
-     *  (2) the Scan query
-     *  (3) any bytes required to construct the ColumnInterpreter object
-     *      properly
-     * 
- */ - public java.lang.String getInterpreterClassName() { - java.lang.Object ref = interpreterClassName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - interpreterClassName_ = s; - } - return s; - } - } - /** - * required string interpreter_class_name = 1; - * - *
-     ** The request passed to the AggregateService consists of three parts
-     *  (1) the (canonical) classname of the ColumnInterpreter implementation
-     *  (2) the Scan query
-     *  (3) any bytes required to construct the ColumnInterpreter object
-     *      properly
-     * 
- */ - public com.google.protobuf.ByteString - getInterpreterClassNameBytes() { - java.lang.Object ref = interpreterClassName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - interpreterClassName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // required .hbase.pb.Scan scan = 2; - public static final int SCAN_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; - /** - * required .hbase.pb.Scan scan = 2; - */ - public boolean hasScan() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required .hbase.pb.Scan scan = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { - return scan_; - } - /** - * required .hbase.pb.Scan scan = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { - return scan_; - } - - // optional bytes interpreter_specific_bytes = 3; - public static final int INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString interpreterSpecificBytes_; - /** - * optional bytes interpreter_specific_bytes = 3; - */ - public boolean hasInterpreterSpecificBytes() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional bytes interpreter_specific_bytes = 3; - */ - public com.google.protobuf.ByteString getInterpreterSpecificBytes() { - return interpreterSpecificBytes_; - } - - private void initFields() { - interpreterClassName_ = ""; - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasInterpreterClassName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasScan()) { - memoizedIsInitialized = 0; - return false; - } - if (!getScan().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getInterpreterClassNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, scan_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, interpreterSpecificBytes_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getInterpreterClassNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, scan_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, interpreterSpecificBytes_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws 
java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) obj; - - boolean result = true; - result = result && (hasInterpreterClassName() == other.hasInterpreterClassName()); - if (hasInterpreterClassName()) { - result = result && getInterpreterClassName() - .equals(other.getInterpreterClassName()); - } - result = result && (hasScan() == other.hasScan()); - if (hasScan()) { - result = result && getScan() - .equals(other.getScan()); - } - result = result && (hasInterpreterSpecificBytes() == other.hasInterpreterSpecificBytes()); - if (hasInterpreterSpecificBytes()) { - result = result && getInterpreterSpecificBytes() - .equals(other.getInterpreterSpecificBytes()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasInterpreterClassName()) { - hash = (37 * hash) + INTERPRETER_CLASS_NAME_FIELD_NUMBER; - hash = (53 * hash) + getInterpreterClassName().hashCode(); - } - if (hasScan()) { - hash = (37 * hash) + SCAN_FIELD_NUMBER; - hash = (53 * hash) + getScan().hashCode(); - } - if (hasInterpreterSpecificBytes()) { - hash = (37 * hash) + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER; - hash = (53 * hash) + getInterpreterSpecificBytes().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.AggregateRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getScanFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - interpreterClassName_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - } else { - scanBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - 
public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest build() { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.interpreterClassName_ = interpreterClassName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (scanBuilder_ == null) { - result.scan_ = scan_; - } else { - result.scan_ = scanBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.interpreterSpecificBytes_ = interpreterSpecificBytes_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance()) return this; - if (other.hasInterpreterClassName()) { - bitField0_ |= 0x00000001; - interpreterClassName_ = other.interpreterClassName_; - onChanged(); - } - if (other.hasScan()) { - mergeScan(other.getScan()); - } - if (other.hasInterpreterSpecificBytes()) { - setInterpreterSpecificBytes(other.getInterpreterSpecificBytes()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasInterpreterClassName()) { - - return false; - } - if (!hasScan()) { - - return false; - } - if (!getScan().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required string interpreter_class_name = 
1; - private java.lang.Object interpreterClassName_ = ""; - /** - * required string interpreter_class_name = 1; - * - *
-       ** The request passed to the AggregateService consists of three parts
-       *  (1) the (canonical) classname of the ColumnInterpreter implementation
-       *  (2) the Scan query
-       *  (3) any bytes required to construct the ColumnInterpreter object
-       *      properly
-       * 
- */ - public boolean hasInterpreterClassName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required string interpreter_class_name = 1; - * - *
-       ** The request passed to the AggregateService consists of three parts
-       *  (1) the (canonical) classname of the ColumnInterpreter implementation
-       *  (2) the Scan query
-       *  (3) any bytes required to construct the ColumnInterpreter object
-       *      properly
-       * 
- */ - public java.lang.String getInterpreterClassName() { - java.lang.Object ref = interpreterClassName_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - interpreterClassName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * required string interpreter_class_name = 1; - * - *
-       ** The request passed to the AggregateService consists of three parts
-       *  (1) the (canonical) classname of the ColumnInterpreter implementation
-       *  (2) the Scan query
-       *  (3) any bytes required to construct the ColumnInterpreter object
-       *      properly
-       * 
- */ - public com.google.protobuf.ByteString - getInterpreterClassNameBytes() { - java.lang.Object ref = interpreterClassName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - interpreterClassName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * required string interpreter_class_name = 1; - * - *
-       ** The request passed to the AggregateService consists of three parts
-       *  (1) the (canonical) classname of the ColumnInterpreter implementation
-       *  (2) the Scan query
-       *  (3) any bytes required to construct the ColumnInterpreter object
-       *      properly
-       * 
- */ - public Builder setInterpreterClassName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - interpreterClassName_ = value; - onChanged(); - return this; - } - /** - * required string interpreter_class_name = 1; - * - *
-       ** The request passed to the AggregateService consists of three parts
-       *  (1) the (canonical) classname of the ColumnInterpreter implementation
-       *  (2) the Scan query
-       *  (3) any bytes required to construct the ColumnInterpreter object
-       *      properly
-       * 
- */ - public Builder clearInterpreterClassName() { - bitField0_ = (bitField0_ & ~0x00000001); - interpreterClassName_ = getDefaultInstance().getInterpreterClassName(); - onChanged(); - return this; - } - /** - * required string interpreter_class_name = 1; - * - *
-       ** The request passed to the AggregateService consists of three parts
-       *  (1) the (canonical) classname of the ColumnInterpreter implementation
-       *  (2) the Scan query
-       *  (3) any bytes required to construct the ColumnInterpreter object
-       *      properly
-       * 
- */ - public Builder setInterpreterClassNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - interpreterClassName_ = value; - onChanged(); - return this; - } - - // required .hbase.pb.Scan scan = 2; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; - /** - * required .hbase.pb.Scan scan = 2; - */ - public boolean hasScan() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required .hbase.pb.Scan scan = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { - if (scanBuilder_ == null) { - return scan_; - } else { - return scanBuilder_.getMessage(); - } - } - /** - * required .hbase.pb.Scan scan = 2; - */ - public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { - if (scanBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - scan_ = value; - onChanged(); - } else { - scanBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * required .hbase.pb.Scan scan = 2; - */ - public Builder setScan( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { - if (scanBuilder_ == null) { - scan_ = builderForValue.build(); - onChanged(); - } else { - scanBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * required .hbase.pb.Scan scan = 2; - */ - public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { - if (scanBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { - scan_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); - } else { - scan_ = value; - } - onChanged(); - } else { - scanBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * required .hbase.pb.Scan scan = 2; - */ - public Builder clearScan() { - if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - onChanged(); - } else { - scanBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - /** - * required .hbase.pb.Scan scan = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getScanFieldBuilder().getBuilder(); - } - /** - * required .hbase.pb.Scan scan = 2; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { - if (scanBuilder_ != null) { - return scanBuilder_.getMessageOrBuilder(); - } else { - return scan_; - } - } - /** - * required .hbase.pb.Scan scan = 2; - */ - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> - getScanFieldBuilder() { - if 
(scanBuilder_ == null) { - scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( - scan_, - getParentForChildren(), - isClean()); - scan_ = null; - } - return scanBuilder_; - } - - // optional bytes interpreter_specific_bytes = 3; - private com.google.protobuf.ByteString interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY; - /** - * optional bytes interpreter_specific_bytes = 3; - */ - public boolean hasInterpreterSpecificBytes() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional bytes interpreter_specific_bytes = 3; - */ - public com.google.protobuf.ByteString getInterpreterSpecificBytes() { - return interpreterSpecificBytes_; - } - /** - * optional bytes interpreter_specific_bytes = 3; - */ - public Builder setInterpreterSpecificBytes(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - interpreterSpecificBytes_ = value; - onChanged(); - return this; - } - /** - * optional bytes interpreter_specific_bytes = 3; - */ - public Builder clearInterpreterSpecificBytes() { - bitField0_ = (bitField0_ & ~0x00000004); - interpreterSpecificBytes_ = getDefaultInstance().getInterpreterSpecificBytes(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.AggregateRequest) - } - - static { - defaultInstance = new AggregateRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.AggregateRequest) - } - - public interface AggregateResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated bytes first_part = 1; - /** - * repeated bytes first_part = 1; - * - *
-     **
-     * The AggregateService methods all have a response that either is a Pair
-     * or a simple object. When it is a Pair both first_part and second_part
-     * have defined values (and the second_part is not present in the response
-     * when the response is not a pair). Refer to the AggregateImplementation 
-     * class for an overview of the AggregateResponse object constructions. 
-     * </pre>
- */ - java.util.List getFirstPartList(); - /** - * repeated bytes first_part = 1; - * - *
-     **
-     * The AggregateService methods all have a response that either is a Pair
-     * or a simple object. When it is a Pair both first_part and second_part
-     * have defined values (and the second_part is not present in the response
-     * when the response is not a pair). Refer to the AggregateImplementation 
-     * class for an overview of the AggregateResponse object constructions. 
-     * </pre>
- */ - int getFirstPartCount(); - /** - * repeated bytes first_part = 1; - * - *
-     **
-     * The AggregateService methods all have a response that either is a Pair
-     * or a simple object. When it is a Pair both first_part and second_part
-     * have defined values (and the second_part is not present in the response
-     * when the response is not a pair). Refer to the AggregateImplementation 
-     * class for an overview of the AggregateResponse object constructions. 
-     * </pre>
- */ - com.google.protobuf.ByteString getFirstPart(int index); - - // optional bytes second_part = 2; - /** - * optional bytes second_part = 2; - */ - boolean hasSecondPart(); - /** - * optional bytes second_part = 2; - */ - com.google.protobuf.ByteString getSecondPart(); - } - /** - * Protobuf type {@code hbase.pb.AggregateResponse} - */ - public static final class AggregateResponse extends - com.google.protobuf.GeneratedMessage - implements AggregateResponseOrBuilder { - // Use AggregateResponse.newBuilder() to construct. - private AggregateResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private AggregateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final AggregateResponse defaultInstance; - public static AggregateResponse getDefaultInstance() { - return defaultInstance; - } - - public AggregateResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private AggregateResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - firstPart_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - firstPart_.add(input.readBytes()); - break; - } - case 18: { - bitField0_ |= 0x00000001; - secondPart_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - firstPart_ = java.util.Collections.unmodifiableList(firstPart_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public AggregateResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new AggregateResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // repeated bytes first_part = 1; - public static final int FIRST_PART_FIELD_NUMBER = 1; - private java.util.List firstPart_; - /** - * repeated bytes first_part = 1; - * - *
-     **
-     * The AggregateService methods all have a response that either is a Pair
-     * or a simple object. When it is a Pair both first_part and second_part
-     * have defined values (and the second_part is not present in the response
-     * when the response is not a pair). Refer to the AggregateImplementation 
-     * class for an overview of the AggregateResponse object constructions. 
-     * </pre>
- */ - public java.util.List - getFirstPartList() { - return firstPart_; - } - /** - * repeated bytes first_part = 1; - * - *
-     **
-     * The AggregateService methods all have a response that either is a Pair
-     * or a simple object. When it is a Pair both first_part and second_part
-     * have defined values (and the second_part is not present in the response
-     * when the response is not a pair). Refer to the AggregateImplementation 
-     * class for an overview of the AggregateResponse object constructions. 
-     * </pre>
- */ - public int getFirstPartCount() { - return firstPart_.size(); - } - /** - * repeated bytes first_part = 1; - * - *
-     **
-     * The AggregateService methods all have a response that either is a Pair
-     * or a simple object. When it is a Pair both first_part and second_part
-     * have defined values (and the second_part is not present in the response
-     * when the response is not a pair). Refer to the AggregateImplementation 
-     * class for an overview of the AggregateResponse object constructions. 
-     * </pre>
- */ - public com.google.protobuf.ByteString getFirstPart(int index) { - return firstPart_.get(index); - } - - // optional bytes second_part = 2; - public static final int SECOND_PART_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString secondPart_; - /** - * optional bytes second_part = 2; - */ - public boolean hasSecondPart() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional bytes second_part = 2; - */ - public com.google.protobuf.ByteString getSecondPart() { - return secondPart_; - } - - private void initFields() { - firstPart_ = java.util.Collections.emptyList(); - secondPart_ = com.google.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < firstPart_.size(); i++) { - output.writeBytes(1, firstPart_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(2, secondPart_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < firstPart_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(firstPart_.get(i)); - } - size += dataSize; - size += 1 * getFirstPartList().size(); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, secondPart_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) obj; - - boolean result = true; - result = result && getFirstPartList() - .equals(other.getFirstPartList()); - result = result && (hasSecondPart() == other.hasSecondPart()); - if (hasSecondPart()) { - result = result && getSecondPart() - .equals(other.getSecondPart()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getFirstPartCount() > 0) { - hash = (37 * hash) + FIRST_PART_FIELD_NUMBER; - hash = (53 * hash) + getFirstPartList().hashCode(); - } - if (hasSecondPart()) { - hash = (37 * hash) + SECOND_PART_FIELD_NUMBER; - hash = (53 * hash) + getSecondPart().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.AggregateResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - firstPart_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - secondPart_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse build() { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse result = new org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - firstPart_ = java.util.Collections.unmodifiableList(firstPart_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.firstPart_ = firstPart_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000001; - } - result.secondPart_ = secondPart_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()) return this; - if (!other.firstPart_.isEmpty()) { - if (firstPart_.isEmpty()) { - 
firstPart_ = other.firstPart_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureFirstPartIsMutable(); - firstPart_.addAll(other.firstPart_); - } - onChanged(); - } - if (other.hasSecondPart()) { - setSecondPart(other.getSecondPart()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // repeated bytes first_part = 1; - private java.util.List firstPart_ = java.util.Collections.emptyList(); - private void ensureFirstPartIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - firstPart_ = new java.util.ArrayList(firstPart_); - bitField0_ |= 0x00000001; - } - } - /** - * repeated bytes first_part = 1; - * - *
-       **
-       * The AggregateService methods all have a response that either is a Pair
-       * or a simple object. When it is a Pair both first_part and second_part
-       * have defined values (and the second_part is not present in the response
-       * when the response is not a pair). Refer to the AggregateImplementation 
-       * class for an overview of the AggregateResponse object constructions. 
-       * </pre>
- */ - public java.util.List - getFirstPartList() { - return java.util.Collections.unmodifiableList(firstPart_); - } - /** - * repeated bytes first_part = 1; - * - *
-       **
-       * The AggregateService methods all have a response that either is a Pair
-       * or a simple object. When it is a Pair both first_part and second_part
-       * have defined values (and the second_part is not present in the response
-       * when the response is not a pair). Refer to the AggregateImplementation 
-       * class for an overview of the AggregateResponse object constructions. 
-       * </pre>
- */ - public int getFirstPartCount() { - return firstPart_.size(); - } - /** - * repeated bytes first_part = 1; - * - *
-       **
-       * The AggregateService methods all have a response that either is a Pair
-       * or a simple object. When it is a Pair both first_part and second_part
-       * have defined values (and the second_part is not present in the response
-       * when the response is not a pair). Refer to the AggregateImplementation 
-       * class for an overview of the AggregateResponse object constructions. 
-       * </pre>
- */ - public com.google.protobuf.ByteString getFirstPart(int index) { - return firstPart_.get(index); - } - /** - * repeated bytes first_part = 1; - * - *
-       **
-       * The AggregateService methods all have a response that either is a Pair
-       * or a simple object. When it is a Pair both first_part and second_part
-       * have defined values (and the second_part is not present in the response
-       * when the response is not a pair). Refer to the AggregateImplementation 
-       * class for an overview of the AggregateResponse object constructions. 
-       * </pre>
- */ - public Builder setFirstPart( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureFirstPartIsMutable(); - firstPart_.set(index, value); - onChanged(); - return this; - } - /** - * repeated bytes first_part = 1; - * - *
-       **
-       * The AggregateService methods all have a response that either is a Pair
-       * or a simple object. When it is a Pair both first_part and second_part
-       * have defined values (and the second_part is not present in the response
-       * when the response is not a pair). Refer to the AggregateImplementation 
-       * class for an overview of the AggregateResponse object constructions. 
-       * </pre>
- */ - public Builder addFirstPart(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureFirstPartIsMutable(); - firstPart_.add(value); - onChanged(); - return this; - } - /** - * repeated bytes first_part = 1; - * - *
-       **
-       * The AggregateService methods all have a response that either is a Pair
-       * or a simple object. When it is a Pair both first_part and second_part
-       * have defined values (and the second_part is not present in the response
-       * when the response is not a pair). Refer to the AggregateImplementation 
-       * class for an overview of the AggregateResponse object constructions. 
-       * </pre>
- */ - public Builder addAllFirstPart( - java.lang.Iterable values) { - ensureFirstPartIsMutable(); - super.addAll(values, firstPart_); - onChanged(); - return this; - } - /** - * repeated bytes first_part = 1; - * - *
-       **
-       * The AggregateService methods all have a response that either is a Pair
-       * or a simple object. When it is a Pair both first_part and second_part
-       * have defined values (and the second_part is not present in the response
-       * when the response is not a pair). Refer to the AggregateImplementation 
-       * class for an overview of the AggregateResponse object constructions. 
-       * </pre>
- */ - public Builder clearFirstPart() { - firstPart_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - - // optional bytes second_part = 2; - private com.google.protobuf.ByteString secondPart_ = com.google.protobuf.ByteString.EMPTY; - /** - * optional bytes second_part = 2; - */ - public boolean hasSecondPart() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional bytes second_part = 2; - */ - public com.google.protobuf.ByteString getSecondPart() { - return secondPart_; - } - /** - * optional bytes second_part = 2; - */ - public Builder setSecondPart(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - secondPart_ = value; - onChanged(); - return this; - } - /** - * optional bytes second_part = 2; - */ - public Builder clearSecondPart() { - bitField0_ = (bitField0_ & ~0x00000002); - secondPart_ = getDefaultInstance().getSecondPart(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.AggregateResponse) - } - - static { - defaultInstance = new AggregateResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.AggregateResponse) - } - - /** - * Protobuf service {@code hbase.pb.AggregateService} - * - *
-   ** Refer to the AggregateImplementation class for an overview of the 
-   *  AggregateService method implementations and their functionality.
-   * </pre>
- */ - public static abstract class AggregateService - implements com.google.protobuf.Service { - protected AggregateService() {} - - public interface Interface { - /** - * rpc GetMax(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getMax( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc GetMin(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getMin( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc GetSum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getSum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc GetRowNum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getRowNum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc GetAvg(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getAvg( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc GetStd(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getStd( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc GetMedian(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getMedian( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new AggregateService() { - @java.lang.Override - public void getMax( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - impl.getMax(controller, request, done); - } - - @java.lang.Override - public void getMin( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - impl.getMin(controller, request, done); - } - - @java.lang.Override - public void getSum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - impl.getSum(controller, request, done); - } - - @java.lang.Override - public void getRowNum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - impl.getRowNum(controller, request, done); - } - - @java.lang.Override - public void getAvg( - 
com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - impl.getAvg(controller, request, done); - } - - @java.lang.Override - public void getStd( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - impl.getStd(controller, request, done); - } - - @java.lang.Override - public void getMedian( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - impl.getMedian(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request); - case 1: - return impl.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request); - case 2: - return impl.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request); - case 3: - return impl.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request); - case 4: - return impl.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request); - case 5: - return impl.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request); - case 6: - return impl.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - case 5: - return 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - /** - * rpc GetMax(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getMax( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc GetMin(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getMin( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc GetSum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getSum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc GetRowNum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getRowNum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc GetAvg(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getAvg( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc GetStd(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getStd( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc GetMedian(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse); - */ - public abstract void getMedian( - com.google.protobuf.RpcController controller, 
- org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.getMax(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.getMin(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 2: - this.getSum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 3: - this.getRowNum(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 4: - this.getAvg(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 5: - this.getStd(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 6: - this.getMedian(controller, (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.getDefaultInstance(); - default: - throw new 
java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 3: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 4: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 5: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - case 6: - return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void getMax( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - } - - public void getMin( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - } - - public void getSum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - } - - public void getRowNum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - } - - public void getAvg( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(4), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - } - - public void getStd( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - } - - public void getMedian( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMax( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMin( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException; - - public 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getSum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getRowNum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getAvg( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getStd( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMedian( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMax( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMin( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getSum( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getRowNum( - com.google.protobuf.RpcController controller, - 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(3), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getAvg( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(4), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getStd( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(5), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getMedian( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(6), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.getDefaultInstance()); - } - - } - - // @@protoc_insertion_point(class_scope:hbase.pb.AggregateService) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_AggregateRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_AggregateRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_AggregateResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_AggregateResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\017Aggregate.proto\022\010hbase.pb\032\014Client.prot" + - "o\"t\n\020AggregateRequest\022\036\n\026interpreter_cla" + - "ss_name\030\001 \002(\t\022\034\n\004scan\030\002 \002(\0132\016.hbase.pb.S" + - "can\022\"\n\032interpreter_specific_bytes\030\003 \001(\014\"" + - "<\n\021AggregateResponse\022\022\n\nfirst_part\030\001 \003(\014" + - "\022\023\n\013second_part\030\002 \001(\0142\355\003\n\020AggregateServi" + - "ce\022A\n\006GetMax\022\032.hbase.pb.AggregateRequest" + - "\032\033.hbase.pb.AggregateResponse\022A\n\006GetMin\022" + - 
"\032.hbase.pb.AggregateRequest\032\033.hbase.pb.A" + - "ggregateResponse\022A\n\006GetSum\022\032.hbase.pb.Ag", - "gregateRequest\032\033.hbase.pb.AggregateRespo" + - "nse\022D\n\tGetRowNum\022\032.hbase.pb.AggregateReq" + - "uest\032\033.hbase.pb.AggregateResponse\022A\n\006Get" + - "Avg\022\032.hbase.pb.AggregateRequest\032\033.hbase." + - "pb.AggregateResponse\022A\n\006GetStd\022\032.hbase.p" + - "b.AggregateRequest\032\033.hbase.pb.AggregateR" + - "esponse\022D\n\tGetMedian\022\032.hbase.pb.Aggregat" + - "eRequest\032\033.hbase.pb.AggregateResponseBE\n" + - "*org.apache.hadoop.hbase.protobuf.genera" + - "tedB\017AggregateProtosH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_AggregateRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_AggregateRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_AggregateRequest_descriptor, - new java.lang.String[] { "InterpreterClassName", "Scan", "InterpreterSpecificBytes", }); - internal_static_hbase_pb_AggregateResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_AggregateResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_AggregateResponse_descriptor, - new java.lang.String[] { "FirstPart", "SecondPart", }); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java deleted file mode 100644 index 86c88c689a2..00000000000 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java +++ /dev/null @@ -1,2088 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: SecureBulkLoad.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class SecureBulkLoadProtos { - private SecureBulkLoadProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface SecureBulkLoadHFilesRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - java.util.List - getFamilyPathList(); - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index); - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - int getFamilyPathCount(); - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - java.util.List - getFamilyPathOrBuilderList(); - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( - int index); - - // optional bool assign_seq_num = 2; - /** - * optional bool assign_seq_num = 2; - */ - boolean hasAssignSeqNum(); - /** - * optional bool assign_seq_num = 2; - */ - boolean getAssignSeqNum(); - - // required .hbase.pb.DelegationToken fs_token = 3; - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - boolean hasFsToken(); - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken(); - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder(); - - // required string bulk_token = 4; - /** - * required string bulk_token = 4; - */ - boolean hasBulkToken(); - /** - * required string bulk_token = 4; - */ - java.lang.String getBulkToken(); - /** - * required string bulk_token = 4; - */ - com.google.protobuf.ByteString - getBulkTokenBytes(); - } - /** - * Protobuf type {@code hbase.pb.SecureBulkLoadHFilesRequest} - */ - public static final class SecureBulkLoadHFilesRequest extends - com.google.protobuf.GeneratedMessage - implements SecureBulkLoadHFilesRequestOrBuilder { - // Use SecureBulkLoadHFilesRequest.newBuilder() to construct. 
- private SecureBulkLoadHFilesRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SecureBulkLoadHFilesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SecureBulkLoadHFilesRequest defaultInstance; - public static SecureBulkLoadHFilesRequest getDefaultInstance() { - return defaultInstance; - } - - public SecureBulkLoadHFilesRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private SecureBulkLoadHFilesRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - familyPath_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - familyPath_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.PARSER, extensionRegistry)); - break; - } - case 16: { - bitField0_ |= 0x00000001; - assignSeqNum_ = input.readBool(); - break; - } - case 26: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null; - if (((bitField0_ & 0x00000002) == 0x00000002)) { - subBuilder = fsToken_.toBuilder(); - } - fsToken_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(fsToken_); - fsToken_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000002; - break; - } - case 34: { - bitField0_ |= 0x00000004; - bulkToken_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - familyPath_ = java.util.Collections.unmodifiableList(familyPath_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.class, 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SecureBulkLoadHFilesRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SecureBulkLoadHFilesRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - public static final int FAMILY_PATH_FIELD_NUMBER = 1; - private java.util.List familyPath_; - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public java.util.List getFamilyPathList() { - return familyPath_; - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public java.util.List - getFamilyPathOrBuilderList() { - return familyPath_; - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public int getFamilyPathCount() { - return familyPath_.size(); - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { - return familyPath_.get(index); - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( - int index) { - return familyPath_.get(index); - } - - // optional bool assign_seq_num = 2; - public static final int ASSIGN_SEQ_NUM_FIELD_NUMBER = 2; - private boolean assignSeqNum_; - /** - * optional bool assign_seq_num = 2; - */ - public boolean hasAssignSeqNum() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional bool assign_seq_num = 2; - */ - public boolean getAssignSeqNum() { - return assignSeqNum_; - } - - // required .hbase.pb.DelegationToken fs_token = 3; - public static final int FS_TOKEN_FIELD_NUMBER = 3; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_; - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - public boolean hasFsToken() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() { - return fsToken_; - } - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { - return fsToken_; - } - - // required string bulk_token = 4; - public static final int BULK_TOKEN_FIELD_NUMBER = 4; - private java.lang.Object bulkToken_; - /** - * required string bulk_token = 4; - */ - public boolean hasBulkToken() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * required string bulk_token = 4; - */ - public java.lang.String getBulkToken() { - java.lang.Object ref = bulkToken_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - 
bulkToken_ = s; - } - return s; - } - } - /** - * required string bulk_token = 4; - */ - public com.google.protobuf.ByteString - getBulkTokenBytes() { - java.lang.Object ref = bulkToken_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - bulkToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - familyPath_ = java.util.Collections.emptyList(); - assignSeqNum_ = false; - fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); - bulkToken_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFsToken()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasBulkToken()) { - memoizedIsInitialized = 0; - return false; - } - for (int i = 0; i < getFamilyPathCount(); i++) { - if (!getFamilyPath(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < familyPath_.size(); i++) { - output.writeMessage(1, familyPath_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(2, assignSeqNum_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(3, fsToken_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(4, getBulkTokenBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < familyPath_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, familyPath_.get(i)); - } - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, assignSeqNum_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, fsToken_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getBulkTokenBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest) obj; - - boolean result = true; - result = result && getFamilyPathList() - .equals(other.getFamilyPathList()); - result = result && (hasAssignSeqNum() == other.hasAssignSeqNum()); - if (hasAssignSeqNum()) { - result = result && (getAssignSeqNum() - == other.getAssignSeqNum()); - } - result = result && 
(hasFsToken() == other.hasFsToken()); - if (hasFsToken()) { - result = result && getFsToken() - .equals(other.getFsToken()); - } - result = result && (hasBulkToken() == other.hasBulkToken()); - if (hasBulkToken()) { - result = result && getBulkToken() - .equals(other.getBulkToken()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getFamilyPathCount() > 0) { - hash = (37 * hash) + FAMILY_PATH_FIELD_NUMBER; - hash = (53 * hash) + getFamilyPathList().hashCode(); - } - if (hasAssignSeqNum()) { - hash = (37 * hash) + ASSIGN_SEQ_NUM_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getAssignSeqNum()); - } - if (hasFsToken()) { - hash = (37 * hash) + FS_TOKEN_FIELD_NUMBER; - hash = (53 * hash) + getFsToken().hashCode(); - } - if (hasBulkToken()) { - hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER; - hash = (53 * hash) + getBulkToken().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest 
parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.SecureBulkLoadHFilesRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getFamilyPathFieldBuilder(); - getFsTokenFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (familyPathBuilder_ == null) { - familyPath_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - familyPathBuilder_.clear(); - } - assignSeqNum_ = false; - bitField0_ = (bitField0_ & ~0x00000002); - if (fsTokenBuilder_ == null) { - fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); - } else { - fsTokenBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000004); - bulkToken_ = ""; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesRequest_descriptor; - } - - public 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest build() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (familyPathBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - familyPath_ = java.util.Collections.unmodifiableList(familyPath_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.familyPath_ = familyPath_; - } else { - result.familyPath_ = familyPathBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000001; - } - result.assignSeqNum_ = assignSeqNum_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000002; - } - if (fsTokenBuilder_ == null) { - result.fsToken_ = fsToken_; - } else { - result.fsToken_ = fsTokenBuilder_.build(); - } - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000004; - } - result.bulkToken_ = bulkToken_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance()) return this; - if (familyPathBuilder_ == null) { - if (!other.familyPath_.isEmpty()) { - if (familyPath_.isEmpty()) { - familyPath_ = other.familyPath_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureFamilyPathIsMutable(); - familyPath_.addAll(other.familyPath_); - } - onChanged(); - } - } else { - if (!other.familyPath_.isEmpty()) { - if (familyPathBuilder_.isEmpty()) { - familyPathBuilder_.dispose(); - familyPathBuilder_ = null; - familyPath_ = other.familyPath_; - bitField0_ = (bitField0_ & ~0x00000001); - familyPathBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getFamilyPathFieldBuilder() : null; - } else { - familyPathBuilder_.addAllMessages(other.familyPath_); - } - } - } - if (other.hasAssignSeqNum()) { - setAssignSeqNum(other.getAssignSeqNum()); - } - if (other.hasFsToken()) { - mergeFsToken(other.getFsToken()); - } - if (other.hasBulkToken()) { - bitField0_ |= 0x00000008; - bulkToken_ = other.bulkToken_; - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFsToken()) { - - return false; - } - if (!hasBulkToken()) { - - return false; - } - for (int i = 0; i < getFamilyPathCount(); i++) { - if (!getFamilyPath(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - private java.util.List familyPath_ = - java.util.Collections.emptyList(); - private void ensureFamilyPathIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - familyPath_ = new java.util.ArrayList(familyPath_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_; - - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public java.util.List getFamilyPathList() { - if (familyPathBuilder_ == null) { - return java.util.Collections.unmodifiableList(familyPath_); - } else { - return familyPathBuilder_.getMessageList(); - } - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public int getFamilyPathCount() { - if (familyPathBuilder_ == null) { - return familyPath_.size(); - } else { - return familyPathBuilder_.getCount(); - } - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { - if (familyPathBuilder_ == null) { - return familyPath_.get(index); - } else { - return familyPathBuilder_.getMessage(index); - } - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public Builder setFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { - if (familyPathBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyPathIsMutable(); - familyPath_.set(index, value); - onChanged(); - } else { - familyPathBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath 
family_path = 1; - */ - public Builder setFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - familyPath_.set(index, builderForValue.build()); - onChanged(); - } else { - familyPathBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { - if (familyPathBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyPathIsMutable(); - familyPath_.add(value); - onChanged(); - } else { - familyPathBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public Builder addFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { - if (familyPathBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyPathIsMutable(); - familyPath_.add(index, value); - onChanged(); - } else { - familyPathBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public Builder addFamilyPath( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - familyPath_.add(builderForValue.build()); - onChanged(); - } else { - familyPathBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public Builder addFamilyPath( - int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - familyPath_.add(index, builderForValue.build()); - onChanged(); - } else { - familyPathBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public Builder addAllFamilyPath( - java.lang.Iterable values) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - super.addAll(values, familyPath_); - onChanged(); - } else { - familyPathBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public Builder clearFamilyPath() { - if (familyPathBuilder_ == null) { - familyPath_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - familyPathBuilder_.clear(); - } - return this; - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public Builder removeFamilyPath(int index) { - if (familyPathBuilder_ == null) { - ensureFamilyPathIsMutable(); - familyPath_.remove(index); - onChanged(); - } else { - familyPathBuilder_.remove(index); - } - return this; - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder( - int index) { - return getFamilyPathFieldBuilder().getBuilder(index); - } - 
/** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( - int index) { - if (familyPathBuilder_ == null) { - return familyPath_.get(index); } else { - return familyPathBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public java.util.List - getFamilyPathOrBuilderList() { - if (familyPathBuilder_ != null) { - return familyPathBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(familyPath_); - } - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() { - return getFamilyPathFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder( - int index) { - return getFamilyPathFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); - } - /** - * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1; - */ - public java.util.List - getFamilyPathBuilderList() { - return getFamilyPathFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> - getFamilyPathFieldBuilder() { - if (familyPathBuilder_ == null) { - familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>( - familyPath_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - familyPath_ = null; - } - return familyPathBuilder_; - } - - // optional bool assign_seq_num = 2; - private boolean assignSeqNum_ ; - /** - * optional bool assign_seq_num = 2; - */ - public boolean hasAssignSeqNum() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional bool assign_seq_num = 2; - */ - public boolean getAssignSeqNum() { - return assignSeqNum_; - } - /** - * optional bool assign_seq_num = 2; - */ - public Builder setAssignSeqNum(boolean value) { - bitField0_ |= 0x00000002; - assignSeqNum_ = value; - onChanged(); - return this; - } - /** - * optional bool assign_seq_num = 2; - */ - public Builder clearAssignSeqNum() { - bitField0_ = (bitField0_ & ~0x00000002); - assignSeqNum_ = false; - onChanged(); - return this; - } - - // required .hbase.pb.DelegationToken fs_token = 3; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); - private 
com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> fsTokenBuilder_; - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - public boolean hasFsToken() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() { - if (fsTokenBuilder_ == null) { - return fsToken_; - } else { - return fsTokenBuilder_.getMessage(); - } - } - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - public Builder setFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) { - if (fsTokenBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - fsToken_ = value; - onChanged(); - } else { - fsTokenBuilder_.setMessage(value); - } - bitField0_ |= 0x00000004; - return this; - } - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - public Builder setFsToken( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder builderForValue) { - if (fsTokenBuilder_ == null) { - fsToken_ = builderForValue.build(); - onChanged(); - } else { - fsTokenBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000004; - return this; - } - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - public Builder mergeFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) { - if (fsTokenBuilder_ == null) { - if (((bitField0_ & 0x00000004) == 0x00000004) && - fsToken_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) { - fsToken_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.newBuilder(fsToken_).mergeFrom(value).buildPartial(); - } else { - fsToken_ = value; - } - onChanged(); - } else { - fsTokenBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000004; - return this; - } - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - public Builder clearFsToken() { - if (fsTokenBuilder_ == null) { - fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance(); - onChanged(); - } else { - fsTokenBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder getFsTokenBuilder() { - bitField0_ |= 0x00000004; - onChanged(); - return getFsTokenFieldBuilder().getBuilder(); - } - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() { - if (fsTokenBuilder_ != null) { - return fsTokenBuilder_.getMessageOrBuilder(); - } else { - return fsToken_; - } - } - /** - * required .hbase.pb.DelegationToken fs_token = 3; - */ - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> - getFsTokenFieldBuilder() { - if (fsTokenBuilder_ == null) { - fsTokenBuilder_ = new 
com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>( - fsToken_, - getParentForChildren(), - isClean()); - fsToken_ = null; - } - return fsTokenBuilder_; - } - - // required string bulk_token = 4; - private java.lang.Object bulkToken_ = ""; - /** - * required string bulk_token = 4; - */ - public boolean hasBulkToken() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * required string bulk_token = 4; - */ - public java.lang.String getBulkToken() { - java.lang.Object ref = bulkToken_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - bulkToken_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * required string bulk_token = 4; - */ - public com.google.protobuf.ByteString - getBulkTokenBytes() { - java.lang.Object ref = bulkToken_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - bulkToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * required string bulk_token = 4; - */ - public Builder setBulkToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - bulkToken_ = value; - onChanged(); - return this; - } - /** - * required string bulk_token = 4; - */ - public Builder clearBulkToken() { - bitField0_ = (bitField0_ & ~0x00000008); - bulkToken_ = getDefaultInstance().getBulkToken(); - onChanged(); - return this; - } - /** - * required string bulk_token = 4; - */ - public Builder setBulkTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - bulkToken_ = value; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.SecureBulkLoadHFilesRequest) - } - - static { - defaultInstance = new SecureBulkLoadHFilesRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.SecureBulkLoadHFilesRequest) - } - - public interface SecureBulkLoadHFilesResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bool loaded = 1; - /** - * required bool loaded = 1; - */ - boolean hasLoaded(); - /** - * required bool loaded = 1; - */ - boolean getLoaded(); - } - /** - * Protobuf type {@code hbase.pb.SecureBulkLoadHFilesResponse} - */ - public static final class SecureBulkLoadHFilesResponse extends - com.google.protobuf.GeneratedMessage - implements SecureBulkLoadHFilesResponseOrBuilder { - // Use SecureBulkLoadHFilesResponse.newBuilder() to construct. 
- private SecureBulkLoadHFilesResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private SecureBulkLoadHFilesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final SecureBulkLoadHFilesResponse defaultInstance; - public static SecureBulkLoadHFilesResponse getDefaultInstance() { - return defaultInstance; - } - - public SecureBulkLoadHFilesResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private SecureBulkLoadHFilesResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - loaded_ = input.readBool(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public SecureBulkLoadHFilesResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SecureBulkLoadHFilesResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required bool loaded = 1; - public static final int LOADED_FIELD_NUMBER = 1; - private boolean loaded_; - /** - * required bool loaded = 1; - */ - public boolean hasLoaded() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bool loaded = 1; - */ - public boolean getLoaded() { - return loaded_; - } - - private void initFields() { - loaded_ = false; - } - private byte memoizedIsInitialized = -1; - public final boolean 
isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLoaded()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBool(1, loaded_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, loaded_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) obj; - - boolean result = true; - result = result && (hasLoaded() == other.hasLoaded()); - if (hasLoaded()) { - result = result && (getLoaded() - == other.getLoaded()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLoaded()) { - hash = (37 * hash) + LOADED_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getLoaded()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(java.io.InputStream 
input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.SecureBulkLoadHFilesResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() 
{ - return new Builder(); - } - - public Builder clear() { - super.clear(); - loaded_ = false; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor; - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse build() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.loaded_ = loaded_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance()) return this; - if (other.hasLoaded()) { - setLoaded(other.getLoaded()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLoaded()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required bool loaded = 1; - private boolean loaded_ ; - /** - * required bool loaded = 1; - */ - public boolean hasLoaded() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bool loaded = 1; - */ - public boolean getLoaded() { - return loaded_; - } - /** - * required bool loaded = 1; - */ 
- public Builder setLoaded(boolean value) { - bitField0_ |= 0x00000001; - loaded_ = value; - onChanged(); - return this; - } - /** - * required bool loaded = 1; - */ - public Builder clearLoaded() { - bitField0_ = (bitField0_ & ~0x00000001); - loaded_ = false; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.SecureBulkLoadHFilesResponse) - } - - static { - defaultInstance = new SecureBulkLoadHFilesResponse(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.SecureBulkLoadHFilesResponse) - } - - /** - * Protobuf service {@code hbase.pb.SecureBulkLoadService} - */ - public static abstract class SecureBulkLoadService - implements com.google.protobuf.Service { - protected SecureBulkLoadService() {} - - public interface Interface { - /** - * rpc PrepareBulkLoad(.hbase.pb.PrepareBulkLoadRequest) returns (.hbase.pb.PrepareBulkLoadResponse); - */ - public abstract void prepareBulkLoad( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc SecureBulkLoadHFiles(.hbase.pb.SecureBulkLoadHFilesRequest) returns (.hbase.pb.SecureBulkLoadHFilesResponse); - */ - public abstract void secureBulkLoadHFiles( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc CleanupBulkLoad(.hbase.pb.CleanupBulkLoadRequest) returns (.hbase.pb.CleanupBulkLoadResponse); - */ - public abstract void cleanupBulkLoad( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new SecureBulkLoadService() { - @java.lang.Override - public void prepareBulkLoad( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, - com.google.protobuf.RpcCallback done) { - impl.prepareBulkLoad(controller, request, done); - } - - @java.lang.Override - public void secureBulkLoadHFiles( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request, - com.google.protobuf.RpcCallback done) { - impl.secureBulkLoadHFiles(controller, request, done); - } - - @java.lang.Override - public void cleanupBulkLoad( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, - com.google.protobuf.RpcCallback done) { - impl.cleanupBulkLoad(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - 
"Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)request); - case 1: - return impl.secureBulkLoadHFiles(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)request); - case 2: - return impl.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - /** - * rpc PrepareBulkLoad(.hbase.pb.PrepareBulkLoadRequest) returns (.hbase.pb.PrepareBulkLoadResponse); - */ - public abstract void prepareBulkLoad( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc SecureBulkLoadHFiles(.hbase.pb.SecureBulkLoadHFilesRequest) returns (.hbase.pb.SecureBulkLoadHFilesResponse); - */ - public abstract void secureBulkLoadHFiles( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc CleanupBulkLoad(.hbase.pb.CleanupBulkLoadRequest) returns (.hbase.pb.CleanupBulkLoadResponse); - */ - public abstract void cleanupBulkLoad( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.getDescriptor().getServices().get(0); - } - public final 
com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.secureBulkLoadHFiles(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 2: - this.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance(); - case 2: - return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void prepareBulkLoad( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request, - 
com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance())); - } - - public void secureBulkLoadHFiles( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.class, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance())); - } - - public void cleanupBulkLoad( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.class, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prepareBulkLoad( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse secureBulkLoadHFiles( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse cleanupBulkLoad( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse prepareBulkLoad( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request) - throws com.google.protobuf.ServiceException { - return 
(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse secureBulkLoadHFiles( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse cleanupBulkLoad( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(2), - controller, - request, - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse.getDefaultInstance()); - } - - } - - // @@protoc_insertion_point(class_scope:hbase.pb.SecureBulkLoadService) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_SecureBulkLoadHFilesRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_SecureBulkLoadHFilesRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_SecureBulkLoadHFilesResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\024SecureBulkLoad.proto\022\010hbase.pb\032\014Client" + - ".proto\"\266\001\n\033SecureBulkLoadHFilesRequest\022>" + - "\n\013family_path\030\001 \003(\0132).hbase.pb.BulkLoadH" + - "FileRequest.FamilyPath\022\026\n\016assign_seq_num" + - "\030\002 \001(\010\022+\n\010fs_token\030\003 \002(\0132\031.hbase.pb.Dele" + - "gationToken\022\022\n\nbulk_token\030\004 \002(\t\".\n\034Secur" + - "eBulkLoadHFilesResponse\022\016\n\006loaded\030\001 \002(\0102" + - "\256\002\n\025SecureBulkLoadService\022V\n\017PrepareBulk" + - "Load\022 .hbase.pb.PrepareBulkLoadRequest\032!" 
+ - ".hbase.pb.PrepareBulkLoadResponse\022e\n\024Sec", - "ureBulkLoadHFiles\022%.hbase.pb.SecureBulkL" + - "oadHFilesRequest\032&.hbase.pb.SecureBulkLo" + - "adHFilesResponse\022V\n\017CleanupBulkLoad\022 .hb" + - "ase.pb.CleanupBulkLoadRequest\032!.hbase.pb" + - ".CleanupBulkLoadResponseBJ\n*org.apache.h" + - "adoop.hbase.protobuf.generatedB\024SecureBu" + - "lkLoadProtosH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_SecureBulkLoadHFilesRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_SecureBulkLoadHFilesRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SecureBulkLoadHFilesRequest_descriptor, - new java.lang.String[] { "FamilyPath", "AssignSeqNum", "FsToken", "BulkToken", }); - internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_SecureBulkLoadHFilesResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor, - new java.lang.String[] { "Loaded", }); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/hbase-examples/README.txt b/hbase-examples/README.txt index c47ed4f7fdc..22d11039d83 100644 --- a/hbase-examples/README.txt +++ b/hbase-examples/README.txt @@ -63,27 +63,8 @@ Example code. 3. Execute {./DemoClient}. ON PROTOBUFS -This maven module has protobuf definition files ('.protos') used by hbase -Coprocessor Endpoints examples including tests. Coprocessor -Endpoints are meant to be standalone, independent code not reliant on hbase -internals. They define their Service using protobuf. The protobuf version -they use can be distinct from that used by HBase internally since HBase started -shading its protobuf references. Endpoints have no access to the shaded protobuf -hbase uses. They do have access to the content of hbase-protocol -- the -.protos found in here -- but avoid using as much of this as you can as it is -liable to change. +This maven module has core protobuf definition files ('.protos') used by hbase +examples. -Generation of java files from protobuf .proto files included here is done apart -from the build. Run the generation whenever you make changes to the .orotos files -and then check in the produced java (The reasoning is that change is infrequent -so why pay the price of generating files anew on each build. - -To generate java files from protos run: - - $ mvn compile -Dcompile-protobuf -or - $ mvn compile -Pcompile-protobuf - -After you've done the above, check it and then check in changes (or post a patch -on a JIRA with your definition file changes and the generated files). Be careful -to notice new files and files removed and do appropriate git rm/adds. +Generation of java files from protobuf .proto files included here is done as +part of the build. 
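A quick sketch of how generation now runs for hbase-examples, assuming only what
the plugin execution added to the pom below declares: the
org.xolstice.maven.plugins protobuf-maven-plugin 'compile' goal is bound to the
generate-sources phase, so the java is regenerated from the .proto files by any
ordinary build, e.g.

  $ mvn compile

There is no separate compile-protobuf profile or property to invoke any more;
the plugin writes its output under target/generated-sources (the plugin
default), and those sources are compiled along with the rest of the module.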
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml index 0eee552782b..9746b9aa47e 100644 --- a/hbase-examples/pom.xml +++ b/hbase-examples/pom.xml @@ -29,7 +29,7 @@ hbase-examples Apache HBase - Examples Examples of HBase usage - + @@ -46,20 +46,33 @@ true - - maven-surefire-plugin - ${surefire.version} - - - ${surefire.firstPartGroups} - - - - - org.apache.maven.plugins - maven-source-plugin - + ${surefire.firstPartGroups} + + + + + org.apache.maven.plugins + maven-source-plugin + + + org.xolstice.maven.plugins + protobuf-maven-plugin + + + compile-protoc + generate-sources + + compile + + + + @@ -81,7 +94,7 @@ - + @@ -94,7 +107,7 @@ - + @@ -116,16 +129,16 @@ test - org.apache.hbase - hbase-common + org.apache.hbase + hbase-common - org.apache.hbase - hbase-protocol + org.apache.hbase + hbase-protocol - org.apache.hbase - hbase-client + org.apache.hbase + hbase-client org.apache.hbase @@ -140,11 +153,10 @@ hbase-thrift - org.apache.hbase - hbase-testing-util - test + org.apache.hbase + hbase-testing-util + test - org.apache.thrift libthrift @@ -161,158 +173,126 @@ com.google.protobuf protobuf-java - - - - - skipExamplesTests - - - skipExamplesTests - - - - true - true - - - - compile-protobuf - - - compile-protobuf - - - - - - org.xolstice.maven.plugins - protobuf-maven-plugin - - - compile-protoc - generate-sources - - compile - - - - - - - - - - - - - - hadoop-2.0 - - - - !hadoop.profile - - - - - org.apache.hadoop - hadoop-mapreduce-client-core - - - com.google.guava - guava - - - - - org.apache.hadoop - hadoop-common - - - - - - maven-dependency-plugin - - - create-mrapp-generated-classpath - generate-test-resources - - build-classpath - - - + + skipExamplesTests + + + skipExamplesTests + + + + true + true + + + + + + + hadoop-2.0 + + + + + !hadoop.profile + + + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + org.apache.hadoop + hadoop-common + + + + + + maven-dependency-plugin + + + create-mrapp-generated-classpath + generate-test-resources + + build-classpath + + + - ${project.build.directory}/test-classes/mrapp-generated-classpath - - - - - - - - - - hadoop-3.0 - - - hadoop.profile - 3.0 - - - - 3.0-SNAPSHOT - - - - org.apache.hadoop - hadoop-common - - - org.apache.hadoop - hadoop-annotations - - - jdk.tools - jdk.tools - - - - - org.apache.hadoop - hadoop-minicluster - - - - - - maven-dependency-plugin - - - create-mrapp-generated-classpath - generate-test-resources - - build-classpath - - - - ${project.build.directory}/test-classes/mrapp-generated-classpath - - - - - - - + ${project.build.directory}/test-classes/mrapp-generated-classpath + + + + + + + diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java deleted file mode 100644 index b780985a9f0..00000000000 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java +++ /dev/null @@ -1,1149 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: Examples.proto - -package org.apache.hadoop.hbase.coprocessor.example.generated; - -public final class ExampleProtos { - private ExampleProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface CountRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - } - /** - * Protobuf type {@code hbase.pb.CountRequest} - */ - public static final class CountRequest extends - com.google.protobuf.GeneratedMessage - implements CountRequestOrBuilder { - // Use CountRequest.newBuilder() to construct. - private CountRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CountRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CountRequest defaultInstance; - public static CountRequest getDefaultInstance() { - return defaultInstance; - } - - public CountRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private CountRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CountRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CountRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized 
!= -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest other = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseDelimitedFrom( - java.io.InputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.CountRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest build() { - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest buildPartial() { - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest other) { - if (other == org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.CountRequest) - } - - static { - defaultInstance = new CountRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.CountRequest) - } - - public interface CountResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required int64 count = 1 [default = 0]; - /** - * required int64 count = 1 [default = 0]; - */ - boolean hasCount(); - /** - * required int64 count = 1 [default = 0]; - */ - long getCount(); - } - /** - * Protobuf type {@code hbase.pb.CountResponse} - */ - public static final class CountResponse extends - com.google.protobuf.GeneratedMessage - implements CountResponseOrBuilder { - // Use CountResponse.newBuilder() to construct. 
- private CountResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CountResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CountResponse defaultInstance; - public static CountResponse getDefaultInstance() { - return defaultInstance; - } - - public CountResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private CountResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - count_ = input.readInt64(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CountResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CountResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required int64 count = 1 [default = 0]; - public static final int COUNT_FIELD_NUMBER = 1; - private long count_; - /** - * required int64 count = 1 [default = 0]; - */ - public boolean hasCount() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required int64 count = 1 [default = 0]; - */ - public long getCount() { - return count_; - } - - private void initFields() { - count_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasCount()) { 
- memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt64(1, count_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(1, count_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse other = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) obj; - - boolean result = true; - result = result && (hasCount() == other.hasCount()); - if (hasCount()) { - result = result && (getCount() - == other.getCount()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasCount()) { - hash = (37 * hash) + COUNT_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getCount()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - java.io.InputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.CountResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - count_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse build() { - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse buildPartial() { - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse result = new org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.count_ = count_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse other) { - if (other == org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance()) return this; - if (other.hasCount()) { - setCount(other.getCount()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasCount()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required int64 count = 1 [default = 0]; - private long count_ ; - /** - * required int64 count = 1 [default = 0]; - */ - public boolean hasCount() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required int64 count = 1 [default = 0]; - */ - public long getCount() { - return count_; - } - /** - * required int64 count = 1 [default = 0]; - */ - public Builder setCount(long value) { - bitField0_ |= 0x00000001; - count_ = value; - onChanged(); - return this; - } - /** - * required int64 count = 1 [default = 0]; - */ - public Builder clearCount() { - bitField0_ = (bitField0_ & ~0x00000001); - count_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.CountResponse) - } - - static { - defaultInstance = new CountResponse(true); - 
defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.CountResponse) - } - - /** - * Protobuf service {@code hbase.pb.RowCountService} - */ - public static abstract class RowCountService - implements com.google.protobuf.Service { - protected RowCountService() {} - - public interface Interface { - /** - * rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); - */ - public abstract void getRowCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); - */ - public abstract void getKeyValueCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new RowCountService() { - @java.lang.Override - public void getRowCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done) { - impl.getRowCount(controller, request, done); - } - - @java.lang.Override - public void getKeyValueCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done) { - impl.getKeyValueCount(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.getRowCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request); - case 1: - return impl.getKeyValueCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - 
com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - /** - * rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); - */ - public abstract void getRowCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); - */ - public abstract void getKeyValueCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.getRowCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.getKeyValueCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - 
case 0: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.RowCountService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void getRowCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance())); - } - - public void getKeyValueCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getRowCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getKeyValueCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getRowCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) 
channel.callBlockingMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance()); - } - - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getKeyValueCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) channel.callBlockingMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance()); - } - - } - - // @@protoc_insertion_point(class_scope:hbase.pb.RowCountService) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_CountRequest_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_CountRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_hbase_pb_CountResponse_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_hbase_pb_CountResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\016Examples.proto\022\010hbase.pb\"\016\n\014CountReque" + - "st\"!\n\rCountResponse\022\020\n\005count\030\001 \002(\003:\00102\226\001" + - "\n\017RowCountService\022>\n\013getRowCount\022\026.hbase" + - ".pb.CountRequest\032\027.hbase.pb.CountRespons" + - "e\022C\n\020getKeyValueCount\022\026.hbase.pb.CountRe" + - "quest\032\027.hbase.pb.CountResponseBN\n5org.ap" + - "ache.hadoop.hbase.coprocessor.example.ge" + - "neratedB\rExampleProtosH\001\210\001\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_hbase_pb_CountRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_hbase_pb_CountRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CountRequest_descriptor, - new java.lang.String[] { }); - internal_static_hbase_pb_CountResponse_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_hbase_pb_CountResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_hbase_pb_CountResponse_descriptor, - new java.lang.String[] { "Count", }); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/hbase-procedure/pom.xml b/hbase-procedure/pom.xml index 4739fa4c9ae..c3154de3054 100644 --- a/hbase-procedure/pom.xml +++ b/hbase-procedure/pom.xml @@ -69,6 +69,10 @@ test-jar test + + org.apache.hbase.thirdparty + hbase-shaded-protobuf + 
org.apache.hbase hbase-protocol-shaded diff --git a/hbase-protocol-shaded/README.txt b/hbase-protocol-shaded/README.txt index 26a96659340..b0030fac215 100644 --- a/hbase-protocol-shaded/README.txt +++ b/hbase-protocol-shaded/README.txt @@ -1,53 +1,6 @@ -Please read carefully as the 'menu options' have changed. -What you do in here is not what you do elsewhere to generate -proto java files. - This module has proto files used by core. These protos overlap with protos that are used by coprocessor endpoints -(CPEP) in the module hbase-protocol. So the core versions have +(CPEP) in the module hbase-protocol. So core versions have a different name, the generated classes are relocated -- i.e. shaded -- to a new location; they are moved from org.apache.hadoop.hbase.* to org.apache.hadoop.hbase.shaded. - -This module also includes the protobuf that hbase core depends -on again relocated to live at an offset of -org.apache.hadoop.hbase.shaded so as to avoid clashes with other -versions of protobuf resident on our CLASSPATH included, -transitively or otherwise, by dependencies: i.e. the shaded -protobuf Message class is at -org.apache.hadoop.hbase.shaded.com.google.protobuf.Message -rather than at com.google.protobuf.Message. - -Finally, this module also includes patches applied on top of -protobuf to add functionality not yet in protobuf that we -need now. - -If you make changes to protos, to the protobuf version or to -the patches you want to apply to protobuf, you must rerun the -below step and then check in what it generated: - - $ mvn install -Dcompile-protobuf - -or - - $ mvn install -Pcompile-protobuf - -NOTE: 'install' above whereas other proto generation only needs 'compile' -NOTE: Unlike elsehwere the above command does NOT install this modules jar -into the repo., intentionally. The jar made by the above is a scratch jar -that is part of the process that gets us to a set of files to check in; -it is not for consumption. Run mvn install without the '-Pcompile-protobuf' -option to get this modules' artifact installed in your repo! - -When finished, the content of src/main/java/org/apache/hadoop/hbase/shaded -will have been updated. Make sure all builds and then carefully -check in the changes. Files may have been added or removed -by the steps above. - -The protobuf version used internally by hbase differs from what -is used over in the CPEP hbase-protocol module but mvn takes care -of ensuring we have the right protobuf in place so you don't have to. - -If you have patches for the protobuf, add them to -src/main/patches directory. They will be applied after -protobuf is shaded and unbundled into src/main/java. 
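The README hunk above describes the reason for the relocation: core and coprocessor endpoints (CPEPs) each keep their own copy of the overlapping generated types, with the core copy shaded under org.apache.hadoop.hbase.shaded. A minimal illustrative sketch of what that separation looks like to a consumer follows; it is not part of the patch, the class names are borrowed from elsewhere in this change, and it assumes both the hbase-protocol/hbase-examples and hbase-protocol-shaded jars are on the classpath:

    // Illustration only, assuming the generated CPEP classes and the relocated
    // protobuf runtime are both available at compile time.
    public class ShadingExample {
      public static void main(String[] args) {
        // A coprocessor endpoint client still programs against the stock protobuf types:
        org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request =
            org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance();
        com.google.protobuf.Message stock = request;   // unshaded com.google.protobuf.Message

        // Core code compiles against the relocated runtime instead; the relocated
        // org.apache.hadoop.hbase.shaded.com.google.protobuf.Message is an unrelated type,
        // so the two protobuf copies can sit on one CLASSPATH without clashing.
        System.out.println(stock.getSerializedSize());
      }
    }
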
diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml index 43f8ac31292..fff02f8b7bb 100644 --- a/hbase-protocol-shaded/pom.xml +++ b/hbase-protocol-shaded/pom.xml @@ -32,18 +32,12 @@ true - 3.2.0 - - ${project.build.directory}/classes - - src/main/java + 3.3.0 - - ${sources.dir} - ${classes.dir} org.apache.maven.plugins @@ -56,7 +50,7 @@ org.apache.maven.plugins maven-source-plugin - + maven-assembly-plugin @@ -80,6 +74,101 @@ + + org.xolstice.maven.plugins + protobuf-maven-plugin + + + compile-protoc + generate-sources + + compile + + + com.google.protobuf:protoc:${internal.protobuf.version}:exe:${os.detected.classifier} + false + true + + + + + + + com.google.code.maven-replacer-plugin + replacer + 1.5.3 + + + generate-sources + + replace + + + + + ${basedir}/target/generated-sources/ + + **/*.java + + + + ([^\.])com.google.protobuf + $1org.apache.hadoop.hbase.shaded.com.google.protobuf + + + + + + org.apache.maven.plugins + maven-shade-plugin + + + package + + shade + + + true + true + + + + com.google.protobuf + org.apache.hadoop.hbase.shaded.com.google.protobuf + + + + + + org.apache.hadoop.hbase.shaded.com.google:* + com.google.protobuf:protobuf-java + com.google.code.findbugs:* + com.google.errorprone:error_prone_annotations + com.google.j2objc:j2objc-annotations + org.codehaus.mojo:animal-sniffer-annotations + org.codehaus.mojo:animal-sniffer-annotations + junit:junit + log4j:log4j + commons-logging:commons-logging + org.apache.hbase:hbase-annotations + com.github.stephenc.fingbugs:* + + + + + + + @@ -115,6 +204,10 @@ excluded above in the shade plugin else the dependency will get bundled--> + + org.apache.hbase.thirdparty + hbase-shaded-protobuf + org.apache.hbase hbase-annotations @@ -147,218 +240,8 @@ true + true - - compile-protobuf - - - - compile-protobuf - - - - compile-protobuf - - ${project.build.directory}/protoc-generated-sources - - ${protoc.sources.dir} - - ${project.build.directory}/protoc-generated-classes - - ${profile.id}.${project.artifactId}-${project.version} - - - - - maven-clean-plugin - - - pre-compile-protoc - generate-sources - - clean - - - - - ${basedir}/src/main/java/org/apache/hadoop/hbase/shaded - - ipc/protobuf/generated/**/*.java - protobuf/generated/**/*.java - com/google/protobuf/**/*.java - - false - - - - - - - - org.xolstice.maven.plugins - protobuf-maven-plugin - - - compile-protoc - generate-sources - - compile - - - com.google.protobuf:protoc:${internal.protobuf.version}:exe:${os.detected.classifier} - ${protoc.sources.dir} - false - - - - - - org.apache.maven.plugins - maven-jar-plugin - ${maven.jar.version} - - ${jar.finalName} - - - - org.apache.maven.plugins - maven-shade-plugin - ${maven.shade.version} - - - package - - shade - - - true - true - - - com.google.protobuf - org.apache.hadoop.hbase.shaded.com.google.protobuf - - - - - - commons-logging:commons-logging - com.github.stephenc.findbugs:findbugs-annotations - log4j:log4j - org.hamcrest:hamcrest-core - org.mockito:mockito-all - junit:junit - org.apache.hbase:hbase-annotations - - - - - - - - - org.apache.maven.plugins - maven-dependency-plugin - - - unpack - package - - unpack - - - - - ${project.groupId} - ${project.artifactId} - ${project.version} - sources - jar - true - ${basedir}/src/main/java - **/*.java - - - - - - - - org.apache.maven.plugins - maven-patch-plugin - ${maven.patch.version} - - - ${basedir}/.. 
- false - - - - patch - - 1 - src/main/patches - ${project.build.directory}/patches-applied.txt - true - - package - - - apply - - - - - - org.apache.maven.plugins - maven-install-plugin - ${maven.install.version} - - true - - - - - diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessage.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessage.java deleted file mode 100644 index 4a6fefa4ab5..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessage.java +++ /dev/null @@ -1,646 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLite; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -/** - * A partial implementation of the {@link Message} interface which implements - * as many methods of that interface as possible in terms of other methods. - * - * @author kenton@google.com Kenton Varda - */ -public abstract class AbstractMessage - // TODO(dweis): Update GeneratedMessage to parameterize with MessageType and BuilderType. 
- extends AbstractMessageLite - implements Message { - - @Override - public boolean isInitialized() { - return MessageReflection.isInitialized(this); - } - - /** - * Interface for the parent of a Builder that allows the builder to - * communicate invalidations back to the parent for use when using nested - * builders. - */ - protected interface BuilderParent { - - /** - * A builder becomes dirty whenever a field is modified -- including fields - * in nested builders -- and becomes clean when build() is called. Thus, - * when a builder becomes dirty, all its parents become dirty as well, and - * when it becomes clean, all its children become clean. The dirtiness - * state is used to invalidate certain cached values. - *
- * To this end, a builder calls markDirty() on its parent whenever it - * transitions from clean to dirty. The parent must propagate this call to - * its own parent, unless it was already dirty, in which case the - * grandparent must necessarily already be dirty as well. The parent can - * only transition back to "clean" after calling build() on all children. - */ - void markDirty(); - } - - /** Create a nested builder. */ - protected Message.Builder newBuilderForType(BuilderParent parent) { - throw new UnsupportedOperationException("Nested builder is not supported for this type."); - } - - - @Override - public List findInitializationErrors() { - return MessageReflection.findMissingFields(this); - } - - @Override - public String getInitializationErrorString() { - return MessageReflection.delimitWithCommas(findInitializationErrors()); - } - - /** TODO(jieluo): Clear it when all subclasses have implemented this method. */ - @Override - public boolean hasOneof(OneofDescriptor oneof) { - throw new UnsupportedOperationException("hasOneof() is not implemented."); - } - - /** TODO(jieluo): Clear it when all subclasses have implemented this method. */ - @Override - public FieldDescriptor getOneofFieldDescriptor(OneofDescriptor oneof) { - throw new UnsupportedOperationException( - "getOneofFieldDescriptor() is not implemented."); - } - - @Override - public final String toString() { - return TextFormat.printToString(this); - } - - @Override - public void writeTo(final CodedOutputStream output) throws IOException { - MessageReflection.writeMessageTo(this, getAllFields(), output, false); - } - - protected int memoizedSize = -1; - - @Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) { - return size; - } - - memoizedSize = MessageReflection.getSerializedSize(this, getAllFields()); - return memoizedSize; - } - - @Override - public boolean equals(final Object other) { - if (other == this) { - return true; - } - if (!(other instanceof Message)) { - return false; - } - final Message otherMessage = (Message) other; - if (getDescriptorForType() != otherMessage.getDescriptorForType()) { - return false; - } - return compareFields(getAllFields(), otherMessage.getAllFields()) && - getUnknownFields().equals(otherMessage.getUnknownFields()); - } - - @Override - public int hashCode() { - int hash = memoizedHashCode; - if (hash == 0) { - hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = hashFields(hash, getAllFields()); - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - } - return hash; - } - - private static ByteString toByteString(Object value) { - if (value instanceof byte[]) { - return ByteString.copyFrom((byte[]) value); - } else { - return (ByteString) value; - } - } - - /** - * Compares two bytes fields. The parameters must be either a byte array or a - * ByteString object. They can be of different type though. - */ - private static boolean compareBytes(Object a, Object b) { - if (a instanceof byte[] && b instanceof byte[]) { - return Arrays.equals((byte[])a, (byte[])b); - } - return toByteString(a).equals(toByteString(b)); - } - - /** - * Converts a list of MapEntry messages into a Map used for equals() and - * hashCode(). 
- */ - @SuppressWarnings({"rawtypes", "unchecked"}) - private static Map convertMapEntryListToMap(List list) { - if (list.isEmpty()) { - return Collections.emptyMap(); - } - Map result = new HashMap(); - Iterator iterator = list.iterator(); - Message entry = (Message) iterator.next(); - Descriptors.Descriptor descriptor = entry.getDescriptorForType(); - Descriptors.FieldDescriptor key = descriptor.findFieldByName("key"); - Descriptors.FieldDescriptor value = descriptor.findFieldByName("value"); - Object fieldValue = entry.getField(value); - if (fieldValue instanceof EnumValueDescriptor) { - fieldValue = ((EnumValueDescriptor) fieldValue).getNumber(); - } - result.put(entry.getField(key), fieldValue); - while (iterator.hasNext()) { - entry = (Message) iterator.next(); - fieldValue = entry.getField(value); - if (fieldValue instanceof EnumValueDescriptor) { - fieldValue = ((EnumValueDescriptor) fieldValue).getNumber(); - } - result.put(entry.getField(key), fieldValue); - } - return result; - } - - /** - * Compares two map fields. The parameters must be a list of MapEntry - * messages. - */ - @SuppressWarnings({"rawtypes", "unchecked"}) - private static boolean compareMapField(Object a, Object b) { - Map ma = convertMapEntryListToMap((List) a); - Map mb = convertMapEntryListToMap((List) b); - return MapFieldLite.equals(ma, mb); - } - - /** - * Compares two set of fields. - * This method is used to implement {@link AbstractMessage#equals(Object)} - * and {@link AbstractMutableMessage#equals(Object)}. It takes special care - * of bytes fields because immutable messages and mutable messages use - * different Java type to reprensent a bytes field and this method should be - * able to compare immutable messages, mutable messages and also an immutable - * message to a mutable message. - */ - static boolean compareFields(Map a, - Map b) { - if (a.size() != b.size()) { - return false; - } - for (FieldDescriptor descriptor : a.keySet()) { - if (!b.containsKey(descriptor)) { - return false; - } - Object value1 = a.get(descriptor); - Object value2 = b.get(descriptor); - if (descriptor.getType() == FieldDescriptor.Type.BYTES) { - if (descriptor.isRepeated()) { - List list1 = (List) value1; - List list2 = (List) value2; - if (list1.size() != list2.size()) { - return false; - } - for (int i = 0; i < list1.size(); i++) { - if (!compareBytes(list1.get(i), list2.get(i))) { - return false; - } - } - } else { - // Compares a singular bytes field. - if (!compareBytes(value1, value2)) { - return false; - } - } - } else if (descriptor.isMapField()) { - if (!compareMapField(value1, value2)) { - return false; - } - } else { - // Compare non-bytes fields. - if (!value1.equals(value2)) { - return false; - } - } - } - return true; - } - - /** - * Calculates the hash code of a map field. {@code value} must be a list of - * MapEntry messages. - */ - @SuppressWarnings("unchecked") - private static int hashMapField(Object value) { - return MapFieldLite.calculateHashCodeForMap(convertMapEntryListToMap((List) value)); - } - - /** Get a hash code for given fields and values, using the given seed. 
*/ - @SuppressWarnings("unchecked") - protected static int hashFields(int hash, Map map) { - for (Map.Entry entry : map.entrySet()) { - FieldDescriptor field = entry.getKey(); - Object value = entry.getValue(); - hash = (37 * hash) + field.getNumber(); - if (field.isMapField()) { - hash = (53 * hash) + hashMapField(value); - } else if (field.getType() != FieldDescriptor.Type.ENUM){ - hash = (53 * hash) + value.hashCode(); - } else if (field.isRepeated()) { - List list = (List) value; - hash = (53 * hash) + Internal.hashEnumList(list); - } else { - hash = (53 * hash) + Internal.hashEnum((EnumLite) value); - } - } - return hash; - } - - /** - * Package private helper method for AbstractParser to create - * UninitializedMessageException with missing field information. - */ - @Override - UninitializedMessageException newUninitializedMessageException() { - return Builder.newUninitializedMessageException(this); - } - - // ================================================================= - - /** - * A partial implementation of the {@link Message.Builder} interface which - * implements as many methods of that interface as possible in terms of - * other methods. - */ - @SuppressWarnings("unchecked") - public static abstract class Builder> - extends AbstractMessageLite.Builder - implements Message.Builder { - // The compiler produces an error if this is not declared explicitly. - @Override - public abstract BuilderType clone(); - - /** TODO(jieluo): Clear it when all subclasses have implemented this method. */ - @Override - public boolean hasOneof(OneofDescriptor oneof) { - throw new UnsupportedOperationException("hasOneof() is not implemented."); - } - - /** TODO(jieluo): Clear it when all subclasses have implemented this method. */ - @Override - public FieldDescriptor getOneofFieldDescriptor(OneofDescriptor oneof) { - throw new UnsupportedOperationException( - "getOneofFieldDescriptor() is not implemented."); - } - - /** TODO(jieluo): Clear it when all subclasses have implemented this method. */ - @Override - public BuilderType clearOneof(OneofDescriptor oneof) { - throw new UnsupportedOperationException("clearOneof() is not implemented."); - } - - @Override - public BuilderType clear() { - for (final Map.Entry entry : - getAllFields().entrySet()) { - clearField(entry.getKey()); - } - return (BuilderType) this; - } - - @Override - public List findInitializationErrors() { - return MessageReflection.findMissingFields(this); - } - - @Override - public String getInitializationErrorString() { - return MessageReflection.delimitWithCommas(findInitializationErrors()); - } - - @Override - protected BuilderType internalMergeFrom(AbstractMessageLite other) { - return mergeFrom((Message) other); - } - - @Override - public BuilderType mergeFrom(final Message other) { - if (other.getDescriptorForType() != getDescriptorForType()) { - throw new IllegalArgumentException( - "mergeFrom(Message) can only merge messages of the same type."); - } - - // Note: We don't attempt to verify that other's fields have valid - // types. Doing so would be a losing battle. We'd have to verify - // all sub-messages as well, and we'd have to make copies of all of - // them to insure that they don't change after verification (since - // the Message interface itself cannot enforce immutability of - // implementations). - // TODO(kenton): Provide a function somewhere called makeDeepCopy() - // which allows people to make secure deep copies of messages. 
- - for (final Map.Entry entry : - other.getAllFields().entrySet()) { - final FieldDescriptor field = entry.getKey(); - if (field.isRepeated()) { - for (final Object element : (List)entry.getValue()) { - addRepeatedField(field, element); - } - } else if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE) { - final Message existingValue = (Message)getField(field); - if (existingValue == existingValue.getDefaultInstanceForType()) { - setField(field, entry.getValue()); - } else { - setField(field, - existingValue.newBuilderForType() - .mergeFrom(existingValue) - .mergeFrom((Message)entry.getValue()) - .build()); - } - } else { - setField(field, entry.getValue()); - } - } - - mergeUnknownFields(other.getUnknownFields()); - - return (BuilderType) this; - } - - @Override - public BuilderType mergeFrom(final CodedInputStream input) - throws IOException { - return mergeFrom(input, ExtensionRegistry.getEmptyRegistry()); - } - - @Override - public BuilderType mergeFrom( - final CodedInputStream input, - final ExtensionRegistryLite extensionRegistry) - throws IOException { - final UnknownFieldSet.Builder unknownFields = - UnknownFieldSet.newBuilder(getUnknownFields()); - while (true) { - final int tag = input.readTag(); - if (tag == 0) { - break; - } - - MessageReflection.BuilderAdapter builderAdapter = - new MessageReflection.BuilderAdapter(this); - if (!MessageReflection.mergeFieldFrom(input, unknownFields, - extensionRegistry, - getDescriptorForType(), - builderAdapter, - tag)) { - // end group tag - break; - } - } - setUnknownFields(unknownFields.build()); - return (BuilderType) this; - } - - @Override - public BuilderType mergeUnknownFields(final UnknownFieldSet unknownFields) { - setUnknownFields( - UnknownFieldSet.newBuilder(getUnknownFields()) - .mergeFrom(unknownFields) - .build()); - return (BuilderType) this; - } - - @Override - public Message.Builder getFieldBuilder(final FieldDescriptor field) { - throw new UnsupportedOperationException( - "getFieldBuilder() called on an unsupported message type."); - } - - @Override - public Message.Builder getRepeatedFieldBuilder(final FieldDescriptor field, int index) { - throw new UnsupportedOperationException( - "getRepeatedFieldBuilder() called on an unsupported message type."); - } - - @Override - public String toString() { - return TextFormat.printToString(this); - } - - /** - * Construct an UninitializedMessageException reporting missing fields in - * the given message. - */ - protected static UninitializedMessageException - newUninitializedMessageException(Message message) { - return new UninitializedMessageException( - MessageReflection.findMissingFields(message)); - } - - /** - * Used to support nested builders and called to mark this builder as clean. - * Clean builders will propagate the {@link BuilderParent#markDirty()} event - * to their parent builders, while dirty builders will not, as their parents - * should be dirty already. - * - * NOTE: Implementations that don't support nested builders don't need to - * override this method. - */ - void markClean() { - throw new IllegalStateException("Should be overridden by subclasses."); - } - - /** - * Used to support nested builders and called when this nested builder is - * no longer used by its parent builder and should release the reference - * to its parent builder. - * - * NOTE: Implementations that don't support nested builders don't need to - * override this method. 
- */ - void dispose() { - throw new IllegalStateException("Should be overridden by subclasses."); - } - - // =============================================================== - // The following definitions seem to be required in order to make javac - // not produce weird errors like: - // - // java/org.apache.hadoop.hbase.shaded.com.google.protobuf/DynamicMessage.java:203: types - // org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessage.Builder< - // org.apache.hadoop.hbase.shaded.com.google.protobuf.DynamicMessage.Builder> and - // org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessage.Builder< - // org.apache.hadoop.hbase.shaded.com.google.protobuf.DynamicMessage.Builder> are incompatible; both - // define mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString), but with unrelated - // return types. - // - // Strangely, these lines are only needed if javac is invoked separately - // on AbstractMessage.java and AbstractMessageLite.java. If javac is - // invoked on both simultaneously, it works. (Or maybe the important - // point is whether or not DynamicMessage.java is compiled together with - // AbstractMessageLite.java -- not sure.) I suspect this is a compiler - // bug. - - @Override - public BuilderType mergeFrom(final ByteString data) - throws InvalidProtocolBufferException { - return (BuilderType) super.mergeFrom(data); - } - - @Override - public BuilderType mergeFrom( - final ByteString data, - final ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - return (BuilderType) super.mergeFrom(data, extensionRegistry); - } - - @Override - public BuilderType mergeFrom(final byte[] data) - throws InvalidProtocolBufferException { - return (BuilderType) super.mergeFrom(data); - } - - @Override - public BuilderType mergeFrom( - final byte[] data, final int off, final int len) - throws InvalidProtocolBufferException { - return (BuilderType) super.mergeFrom(data, off, len); - } - - @Override - public BuilderType mergeFrom( - final byte[] data, - final ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - return (BuilderType) super.mergeFrom(data, extensionRegistry); - } - - @Override - public BuilderType mergeFrom( - final byte[] data, final int off, final int len, - final ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - return (BuilderType) super.mergeFrom(data, off, len, extensionRegistry); - } - - @Override - public BuilderType mergeFrom(final InputStream input) - throws IOException { - return (BuilderType) super.mergeFrom(input); - } - - @Override - public BuilderType mergeFrom( - final InputStream input, - final ExtensionRegistryLite extensionRegistry) - throws IOException { - return (BuilderType) super.mergeFrom(input, extensionRegistry); - } - - @Override - public boolean mergeDelimitedFrom(final InputStream input) - throws IOException { - return super.mergeDelimitedFrom(input); - } - - @Override - public boolean mergeDelimitedFrom( - final InputStream input, - final ExtensionRegistryLite extensionRegistry) - throws IOException { - return super.mergeDelimitedFrom(input, extensionRegistry); - } - } - - /** - * @deprecated from v3.0.0-beta-3+, for compatibility with v2.5.0 and v2.6.1 - * generated code. - */ - @Deprecated - protected static int hashLong(long n) { - return (int) (n ^ (n >>> 32)); - } - // - /** - * @deprecated from v3.0.0-beta-3+, for compatibility with v2.5.0 and v2.6.1 - * generated code. 
- */ - @Deprecated - protected static int hashBoolean(boolean b) { - return b ? 1231 : 1237; - } - // - /** - * @deprecated from v3.0.0-beta-3+, for compatibility with v2.5.0 and v2.6.1 - * generated code. - */ - @Deprecated - protected static int hashEnum(EnumLite e) { - return e.getNumber(); - } - // - /** - * @deprecated from v3.0.0-beta-3+, for compatibility with v2.5.0 and v2.6.1 - * generated code. - */ - @Deprecated - protected static int hashEnumList(List list) { - int hash = 1; - for (EnumLite e : list) { - hash = 31 * hash + hashEnum(e); - } - return hash; - } -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessageLite.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessageLite.java deleted file mode 100644 index e5b87f072c1..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessageLite.java +++ /dev/null @@ -1,383 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import java.io.FilterInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.Collection; - -/** - * A partial implementation of the {@link MessageLite} interface which - * implements as many methods of that interface as possible in terms of other - * methods. 
- * - * @author kenton@google.com Kenton Varda - */ -public abstract class AbstractMessageLite< - MessageType extends AbstractMessageLite, - BuilderType extends AbstractMessageLite.Builder> - implements MessageLite { - protected int memoizedHashCode = 0; - @Override - public ByteString toByteString() { - try { - final ByteString.CodedBuilder out = - ByteString.newCodedBuilder(getSerializedSize()); - writeTo(out.getCodedOutput()); - return out.build(); - } catch (IOException e) { - throw new RuntimeException(getSerializingExceptionMessage("ByteString"), e); - } - } - - @Override - public byte[] toByteArray() { - try { - final byte[] result = new byte[getSerializedSize()]; - final CodedOutputStream output = CodedOutputStream.newInstance(result); - writeTo(output); - output.checkNoSpaceLeft(); - return result; - } catch (IOException e) { - throw new RuntimeException(getSerializingExceptionMessage("byte array"), e); - } - } - - @Override - public void writeTo(final OutputStream output) throws IOException { - final int bufferSize = - CodedOutputStream.computePreferredBufferSize(getSerializedSize()); - final CodedOutputStream codedOutput = - CodedOutputStream.newInstance(output, bufferSize); - writeTo(codedOutput); - codedOutput.flush(); - } - - @Override - public void writeDelimitedTo(final OutputStream output) throws IOException { - final int serialized = getSerializedSize(); - final int bufferSize = CodedOutputStream.computePreferredBufferSize( - CodedOutputStream.computeRawVarint32Size(serialized) + serialized); - final CodedOutputStream codedOutput = - CodedOutputStream.newInstance(output, bufferSize); - codedOutput.writeRawVarint32(serialized); - writeTo(codedOutput); - codedOutput.flush(); - } - - - /** - * Package private helper method for AbstractParser to create - * UninitializedMessageException. - */ - UninitializedMessageException newUninitializedMessageException() { - return new UninitializedMessageException(this); - } - - private String getSerializingExceptionMessage(String target) { - return "Serializing " + getClass().getName() + " to a " + target - + " threw an IOException (should never happen)."; - } - - protected static void checkByteStringIsUtf8(ByteString byteString) - throws IllegalArgumentException { - if (!byteString.isValidUtf8()) { - throw new IllegalArgumentException("Byte string is not UTF-8."); - } - } - - protected static void addAll(final Iterable values, - final Collection list) { - Builder.addAll(values, list); - } - - /** - * A partial implementation of the {@link Message.Builder} interface which - * implements as many methods of that interface as possible in terms of - * other methods. - */ - @SuppressWarnings("unchecked") - public abstract static class Builder< - MessageType extends AbstractMessageLite, - BuilderType extends Builder> - implements MessageLite.Builder { - // The compiler produces an error if this is not declared explicitly. - @Override - public abstract BuilderType clone(); - - @Override - public BuilderType mergeFrom(final CodedInputStream input) throws IOException { - return mergeFrom(input, ExtensionRegistryLite.getEmptyRegistry()); - } - - // Re-defined here for return type covariance. 
- @Override - public abstract BuilderType mergeFrom( - final CodedInputStream input, final ExtensionRegistryLite extensionRegistry) - throws IOException; - - @Override - public BuilderType mergeFrom(final ByteString data) throws InvalidProtocolBufferException { - try { - final CodedInputStream input = data.newCodedInput(); - mergeFrom(input); - input.checkLastTagWas(0); - return (BuilderType) this; - } catch (InvalidProtocolBufferException e) { - throw e; - } catch (IOException e) { - throw new RuntimeException(getReadingExceptionMessage("ByteString"), e); - } - } - - @Override - public BuilderType mergeFrom( - final ByteString data, final ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - try { - final CodedInputStream input = data.newCodedInput(); - mergeFrom(input, extensionRegistry); - input.checkLastTagWas(0); - return (BuilderType) this; - } catch (InvalidProtocolBufferException e) { - throw e; - } catch (IOException e) { - throw new RuntimeException(getReadingExceptionMessage("ByteString"), e); - } - } - - @Override - public BuilderType mergeFrom(final byte[] data) throws InvalidProtocolBufferException { - return mergeFrom(data, 0, data.length); - } - - @Override - public BuilderType mergeFrom(final byte[] data, final int off, final int len) - throws InvalidProtocolBufferException { - try { - final CodedInputStream input = - CodedInputStream.newInstance(data, off, len); - mergeFrom(input); - input.checkLastTagWas(0); - return (BuilderType) this; - } catch (InvalidProtocolBufferException e) { - throw e; - } catch (IOException e) { - throw new RuntimeException(getReadingExceptionMessage("byte array"), e); - } - } - - @Override - public BuilderType mergeFrom(final byte[] data, final ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - return mergeFrom(data, 0, data.length, extensionRegistry); - } - - @Override - public BuilderType mergeFrom( - final byte[] data, - final int off, - final int len, - final ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - try { - final CodedInputStream input = - CodedInputStream.newInstance(data, off, len); - mergeFrom(input, extensionRegistry); - input.checkLastTagWas(0); - return (BuilderType) this; - } catch (InvalidProtocolBufferException e) { - throw e; - } catch (IOException e) { - throw new RuntimeException(getReadingExceptionMessage("byte array"), e); - } - } - - @Override - public BuilderType mergeFrom(final InputStream input) throws IOException { - final CodedInputStream codedInput = CodedInputStream.newInstance(input); - mergeFrom(codedInput); - codedInput.checkLastTagWas(0); - return (BuilderType) this; - } - - @Override - public BuilderType mergeFrom( - final InputStream input, final ExtensionRegistryLite extensionRegistry) throws IOException { - final CodedInputStream codedInput = CodedInputStream.newInstance(input); - mergeFrom(codedInput, extensionRegistry); - codedInput.checkLastTagWas(0); - return (BuilderType) this; - } - - /** - * An InputStream implementations which reads from some other InputStream - * but is limited to a particular number of bytes. Used by - * mergeDelimitedFrom(). This is intentionally package-private so that - * UnknownFieldSet can share it. 
- */ - static final class LimitedInputStream extends FilterInputStream { - private int limit; - - LimitedInputStream(InputStream in, int limit) { - super(in); - this.limit = limit; - } - - @Override - public int available() throws IOException { - return Math.min(super.available(), limit); - } - - @Override - public int read() throws IOException { - if (limit <= 0) { - return -1; - } - final int result = super.read(); - if (result >= 0) { - --limit; - } - return result; - } - - @Override - public int read(final byte[] b, final int off, int len) - throws IOException { - if (limit <= 0) { - return -1; - } - len = Math.min(len, limit); - final int result = super.read(b, off, len); - if (result >= 0) { - limit -= result; - } - return result; - } - - @Override - public long skip(final long n) throws IOException { - final long result = super.skip(Math.min(n, limit)); - if (result >= 0) { - limit -= result; - } - return result; - } - } - - @Override - public boolean mergeDelimitedFrom( - final InputStream input, final ExtensionRegistryLite extensionRegistry) throws IOException { - final int firstByte = input.read(); - if (firstByte == -1) { - return false; - } - final int size = CodedInputStream.readRawVarint32(firstByte, input); - final InputStream limitedInput = new LimitedInputStream(input, size); - mergeFrom(limitedInput, extensionRegistry); - return true; - } - - @Override - public boolean mergeDelimitedFrom(final InputStream input) throws IOException { - return mergeDelimitedFrom(input, - ExtensionRegistryLite.getEmptyRegistry()); - } - - @Override - @SuppressWarnings("unchecked") // isInstance takes care of this - public BuilderType mergeFrom(final MessageLite other) { - if (!getDefaultInstanceForType().getClass().isInstance(other)) { - throw new IllegalArgumentException( - "mergeFrom(MessageLite) can only merge messages of the same type."); - } - - return internalMergeFrom((MessageType) other); - } - - protected abstract BuilderType internalMergeFrom(MessageType message); - - private String getReadingExceptionMessage(String target) { - return "Reading " + getClass().getName() + " from a " + target - + " threw an IOException (should never happen)."; - } - - /** - * Construct an UninitializedMessageException reporting missing fields in - * the given message. - */ - protected static UninitializedMessageException - newUninitializedMessageException(MessageLite message) { - return new UninitializedMessageException(message); - } - - /** - * Adds the {@code values} to the {@code list}. This is a helper method - * used by generated code. Users should ignore it. - * - * @throws NullPointerException if {@code values} or any of the elements of - * {@code values} is null. When that happens, some elements of - * {@code values} may have already been added to the result {@code list}. - */ - protected static void addAll(final Iterable values, - final Collection list) { - if (values == null) { - throw new NullPointerException(); - } - if (values instanceof LazyStringList) { - // For StringOrByteStringLists, check the underlying elements to avoid - // forcing conversions of ByteStrings to Strings. 
- checkForNullValues(((LazyStringList) values).getUnderlyingElements()); - list.addAll((Collection) values); - } else if (values instanceof Collection) { - checkForNullValues(values); - list.addAll((Collection) values); - } else { - for (final T value : values) { - if (value == null) { - throw new NullPointerException(); - } - list.add(value); - } - } - } - - private static void checkForNullValues(final Iterable values) { - for (final Object value : values) { - if (value == null) { - throw new NullPointerException(); - } - } - } - } -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractParser.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractParser.java deleted file mode 100644 index 5fe6a22d247..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractParser.java +++ /dev/null @@ -1,258 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.LimitedInputStream; - -import java.io.IOException; -import java.io.InputStream; - -/** - * A partial implementation of the {@link Parser} interface which implements - * as many methods of that interface as possible in terms of other methods. - * - * Note: This class implements all the convenience methods in the - * {@link Parser} interface. See {@link Parser} for related javadocs. - * Subclasses need to implement - * {@link Parser#parsePartialFrom(CodedInputStream, ExtensionRegistryLite)} - * - * @author liujisi@google.com (Pherl Liu) - */ -public abstract class AbstractParser - implements Parser { - /** - * Creates an UninitializedMessageException for MessageType. 
- */ - private UninitializedMessageException - newUninitializedMessageException(MessageType message) { - if (message instanceof AbstractMessageLite) { - return ((AbstractMessageLite) message).newUninitializedMessageException(); - } - return new UninitializedMessageException(message); - } - - /** - * Helper method to check if message is initialized. - * - * @throws InvalidProtocolBufferException if it is not initialized. - * @return The message to check. - */ - private MessageType checkMessageInitialized(MessageType message) - throws InvalidProtocolBufferException { - if (message != null && !message.isInitialized()) { - throw newUninitializedMessageException(message) - .asInvalidProtocolBufferException() - .setUnfinishedMessage(message); - } - return message; - } - - private static final ExtensionRegistryLite EMPTY_REGISTRY - = ExtensionRegistryLite.getEmptyRegistry(); - - @Override - public MessageType parsePartialFrom(CodedInputStream input) - throws InvalidProtocolBufferException { - return parsePartialFrom(input, EMPTY_REGISTRY); - } - - @Override - public MessageType parseFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - return checkMessageInitialized( - parsePartialFrom(input, extensionRegistry)); - } - - @Override - public MessageType parseFrom(CodedInputStream input) throws InvalidProtocolBufferException { - return parseFrom(input, EMPTY_REGISTRY); - } - - @Override - public MessageType parsePartialFrom(ByteString data, ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - MessageType message; - try { - CodedInputStream input = data.newCodedInput(); - message = parsePartialFrom(input, extensionRegistry); - try { - input.checkLastTagWas(0); - } catch (InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(message); - } - return message; - } catch (InvalidProtocolBufferException e) { - throw e; - } - } - - @Override - public MessageType parsePartialFrom(ByteString data) throws InvalidProtocolBufferException { - return parsePartialFrom(data, EMPTY_REGISTRY); - } - - @Override - public MessageType parseFrom(ByteString data, ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - return checkMessageInitialized(parsePartialFrom(data, extensionRegistry)); - } - - @Override - public MessageType parseFrom(ByteString data) throws InvalidProtocolBufferException { - return parseFrom(data, EMPTY_REGISTRY); - } - - @Override - public MessageType parsePartialFrom( - byte[] data, int off, int len, ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - try { - CodedInputStream input = CodedInputStream.newInstance(data, off, len); - MessageType message = parsePartialFrom(input, extensionRegistry); - try { - input.checkLastTagWas(0); - } catch (InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(message); - } - return message; - } catch (InvalidProtocolBufferException e) { - throw e; - } - } - - @Override - public MessageType parsePartialFrom(byte[] data, int off, int len) - throws InvalidProtocolBufferException { - return parsePartialFrom(data, off, len, EMPTY_REGISTRY); - } - - @Override - public MessageType parsePartialFrom(byte[] data, ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - return parsePartialFrom(data, 0, data.length, extensionRegistry); - } - - @Override - public MessageType parsePartialFrom(byte[] data) throws InvalidProtocolBufferException { - return 
parsePartialFrom(data, 0, data.length, EMPTY_REGISTRY); - } - - @Override - public MessageType parseFrom( - byte[] data, int off, int len, ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - return checkMessageInitialized( - parsePartialFrom(data, off, len, extensionRegistry)); - } - - @Override - public MessageType parseFrom(byte[] data, int off, int len) - throws InvalidProtocolBufferException { - return parseFrom(data, off, len, EMPTY_REGISTRY); - } - - @Override - public MessageType parseFrom(byte[] data, ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - return parseFrom(data, 0, data.length, extensionRegistry); - } - - @Override - public MessageType parseFrom(byte[] data) throws InvalidProtocolBufferException { - return parseFrom(data, EMPTY_REGISTRY); - } - - @Override - public MessageType parsePartialFrom(InputStream input, ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - CodedInputStream codedInput = CodedInputStream.newInstance(input); - MessageType message = parsePartialFrom(codedInput, extensionRegistry); - try { - codedInput.checkLastTagWas(0); - } catch (InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(message); - } - return message; - } - - @Override - public MessageType parsePartialFrom(InputStream input) throws InvalidProtocolBufferException { - return parsePartialFrom(input, EMPTY_REGISTRY); - } - - @Override - public MessageType parseFrom(InputStream input, ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - return checkMessageInitialized( - parsePartialFrom(input, extensionRegistry)); - } - - @Override - public MessageType parseFrom(InputStream input) throws InvalidProtocolBufferException { - return parseFrom(input, EMPTY_REGISTRY); - } - - @Override - public MessageType parsePartialDelimitedFrom( - InputStream input, ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - int size; - try { - int firstByte = input.read(); - if (firstByte == -1) { - return null; - } - size = CodedInputStream.readRawVarint32(firstByte, input); - } catch (IOException e) { - throw new InvalidProtocolBufferException(e); - } - InputStream limitedInput = new LimitedInputStream(input, size); - return parsePartialFrom(limitedInput, extensionRegistry); - } - - @Override - public MessageType parsePartialDelimitedFrom(InputStream input) - throws InvalidProtocolBufferException { - return parsePartialDelimitedFrom(input, EMPTY_REGISTRY); - } - - @Override - public MessageType parseDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry) - throws InvalidProtocolBufferException { - return checkMessageInitialized( - parsePartialDelimitedFrom(input, extensionRegistry)); - } - - @Override - public MessageType parseDelimitedFrom(InputStream input) throws InvalidProtocolBufferException { - return parseDelimitedFrom(input, EMPTY_REGISTRY); - } -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractProtobufList.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractProtobufList.java deleted file mode 100644 index 87b0cdce16a..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractProtobufList.java +++ /dev/null @@ -1,180 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. 
-// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.ProtobufList; - -import java.util.AbstractList; -import java.util.Collection; -import java.util.List; -import java.util.RandomAccess; - -/** - * An abstract implementation of {@link ProtobufList} which manages mutability semantics. All mutate - * methods must check if the list is mutable before proceeding. Subclasses must invoke - * {@link #ensureIsMutable()} manually when overriding those methods. - *

- * This implementation assumes all subclasses are array based, supporting random access. - */ -abstract class AbstractProtobufList extends AbstractList implements ProtobufList { - - protected static final int DEFAULT_CAPACITY = 10; - - /** - * Whether or not this list is modifiable. - */ - private boolean isMutable; - - /** - * Constructs a mutable list by default. - */ - AbstractProtobufList() { - isMutable = true; - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } - if (!(o instanceof List)) { - return false; - } - // Handle lists that do not support RandomAccess as efficiently as possible by using an iterator - // based approach in our super class. Otherwise our index based approach will avoid those - // allocations. - if (!(o instanceof RandomAccess)) { - return super.equals(o); - } - - List other = (List) o; - final int size = size(); - if (size != other.size()) { - return false; - } - for (int i = 0; i < size; i++) { - if (!get(i).equals(other.get(i))) { - return false; - } - } - return true; - } - - @Override - public int hashCode() { - final int size = size(); - int hashCode = 1; - for (int i = 0; i < size; i++) { - hashCode = (31 * hashCode) + get(i).hashCode(); - } - return hashCode; - } - - @Override - public boolean add(E e) { - ensureIsMutable(); - return super.add(e); - } - - @Override - public void add(int index, E element) { - ensureIsMutable(); - super.add(index, element); - } - - @Override - public boolean addAll(Collection c) { - ensureIsMutable(); - return super.addAll(c); - } - - @Override - public boolean addAll(int index, Collection c) { - ensureIsMutable(); - return super.addAll(index, c); - } - - @Override - public void clear() { - ensureIsMutable(); - super.clear(); - } - - @Override - public boolean isModifiable() { - return isMutable; - } - - @Override - public final void makeImmutable() { - isMutable = false; - } - - @Override - public E remove(int index) { - ensureIsMutable(); - return super.remove(index); - } - - @Override - public boolean remove(Object o) { - ensureIsMutable(); - return super.remove(o); - } - - @Override - public boolean removeAll(Collection c) { - ensureIsMutable(); - return super.removeAll(c); - } - - @Override - public boolean retainAll(Collection c) { - ensureIsMutable(); - return super.retainAll(c); - } - - @Override - public E set(int index, E element) { - ensureIsMutable(); - return super.set(index, element); - } - - /** - * Throws an {@link UnsupportedOperationException} if the list is immutable. Subclasses are - * responsible for invoking this method on mutate operations. - */ - protected void ensureIsMutable() { - if (!isMutable) { - throw new UnsupportedOperationException(); - } - } -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Any.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Any.java deleted file mode 100644 index bf8e8526557..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Any.java +++ /dev/null @@ -1,899 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/any.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -/** - *

- * `Any` contains an arbitrary serialized protocol buffer message along with a
- * URL that describes the type of the serialized message.
- * Protobuf library provides support to pack/unpack Any values in the form
- * of utility functions or additional generated methods of the Any type.
- * Example 1: Pack and unpack a message in C++.
- *     Foo foo = ...;
- *     Any any;
- *     any.PackFrom(foo);
- *     ...
- *     if (any.UnpackTo(&foo)) {
- *       ...
- *     }
- * Example 2: Pack and unpack a message in Java.
- *     Foo foo = ...;
- *     Any any = Any.pack(foo);
- *     ...
- *     if (any.is(Foo.class)) {
- *       foo = any.unpack(Foo.class);
- *     }
- *  Example 3: Pack and unpack a message in Python.
- *     foo = Foo(...)
- *     any = Any()
- *     any.Pack(foo)
- *     ...
- *     if any.Is(Foo.DESCRIPTOR):
- *       any.Unpack(foo)
- *       ...
- * The pack methods provided by protobuf library will by default use
- * 'type.googleapis.com/full.type.name' as the type URL and the unpack
- * methods only use the fully qualified type name after the last '/'
- * in the type URL, for example "foo.bar.com/x/y.z" will yield type
- * name "y.z".
- * JSON
- * ====
- * The JSON representation of an `Any` value uses the regular
- * representation of the deserialized, embedded message, with an
- * additional field `@type` which contains the type URL. Example:
- *     package google.profile;
- *     message Person {
- *       string first_name = 1;
- *       string last_name = 2;
- *     }
- *     {
- *       "@type": "type.googleapis.com/google.profile.Person",
- *       "firstName": <string>,
- *       "lastName": <string>
- *     }
- * If the embedded message type is well-known and has a custom JSON
- * representation, that representation will be embedded adding a field
- * `value` which holds the custom JSON in addition to the `@type`
- * field. Example (for message [google.protobuf.Duration][]):
- *     {
- *       "@type": "type.googleapis.org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration",
- *       "value": "1.212s"
- *     }
- * 
- * - * Protobuf type {@code google.protobuf.Any} - */ -public final class Any extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:google.protobuf.Any) - AnyOrBuilder { - // Use Any.newBuilder() to construct. - private Any(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private Any() { - typeUrl_ = ""; - value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; - } - - @java.lang.Override - public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private Any( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - java.lang.String s = input.readStringRequireUtf8(); - - typeUrl_ = s; - break; - } - case 18: { - - value_ = input.readBytes(); - break; - } - } - } - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - makeExtensionsImmutable(); - } - } - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyProto.internal_static_google_protobuf_Any_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyProto.internal_static_google_protobuf_Any_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.Builder.class); - } - - private static String getTypeUrl( - java.lang.String typeUrlPrefix, - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor descriptor) { - return typeUrlPrefix.endsWith("/") - ? typeUrlPrefix + descriptor.getFullName() - : typeUrlPrefix + "/" + descriptor.getFullName(); - } - - private static String getTypeNameFromTypeUrl( - java.lang.String typeUrl) { - int pos = typeUrl.lastIndexOf('/'); - return pos == -1 ? "" : typeUrl.substring(pos + 1); - } - - public static Any pack( - T message) { - return Any.newBuilder() - .setTypeUrl(getTypeUrl("type.googleapis.com", - message.getDescriptorForType())) - .setValue(message.toByteString()) - .build(); - } - - /** - * Packs a message using the given type URL prefix. The type URL will - * be constructed by concatenating the message type's full name to the - * prefix with an optional "/" separator if the prefix doesn't end - * with "/" already. 
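// A minimal sketch of the pack/unpack flow described in the Any class comment above,
// for illustration only. It is written against the plain com.google.protobuf runtime and
// its bundled Duration well-known type; the shaded Any being deleted here exposes the
// same pack/is/unpack methods. The class name AnyRoundTrip is hypothetical.
import com.google.protobuf.Any;
import com.google.protobuf.Duration;
import com.google.protobuf.InvalidProtocolBufferException;

public class AnyRoundTrip {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    Duration d = Duration.newBuilder().setSeconds(1).setNanos(212000000).build();
    // pack() serializes the message and records its type URL; the default prefix is
    // "type.googleapis.com", so the URL becomes "type.googleapis.com/google.protobuf.Duration".
    Any any = Any.pack(d);
    System.out.println(any.getTypeUrl());
    // is()/unpack() match on the fully qualified type name after the last '/'
    // and re-parse the stored bytes as the requested type.
    if (any.is(Duration.class)) {
      Duration back = any.unpack(Duration.class);
      System.out.println(back.getSeconds() + "s " + back.getNanos() + "ns");
    }
  }
}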
- */ - public static Any pack( - T message, java.lang.String typeUrlPrefix) { - return Any.newBuilder() - .setTypeUrl(getTypeUrl(typeUrlPrefix, - message.getDescriptorForType())) - .setValue(message.toByteString()) - .build(); - } - - public boolean is( - java.lang.Class clazz) { - T defaultInstance = - org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.getDefaultInstance(clazz); - return getTypeNameFromTypeUrl(getTypeUrl()).equals( - defaultInstance.getDescriptorForType().getFullName()); - } - - private volatile org.apache.hadoop.hbase.shaded.com.google.protobuf.Message cachedUnpackValue; - - public T unpack( - java.lang.Class clazz) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - if (!is(clazz)) { - throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( - "Type of the Any message does not match the given class."); - } - if (cachedUnpackValue != null) { - return (T) cachedUnpackValue; - } - T defaultInstance = - org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.getDefaultInstance(clazz); - T result = (T) defaultInstance.getParserForType() - .parseFrom(getValue()); - cachedUnpackValue = result; - return result; - } - public static final int TYPE_URL_FIELD_NUMBER = 1; - private volatile java.lang.Object typeUrl_; - /** - *
-   * A URL/resource name whose content describes the type of the
-   * serialized protocol buffer message.
-   * For URLs which use the scheme `http`, `https`, or no scheme, the
-   * following restrictions and interpretations apply:
-   * * If no scheme is provided, `https` is assumed.
-   * * The last segment of the URL's path must represent the fully
-   *   qualified name of the type (as in `path/google.protobuf.Duration`).
-   *   The name should be in a canonical form (e.g., leading "." is
-   *   not accepted).
-   * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
-   *   value in binary format, or produce an error.
-   * * Applications are allowed to cache lookup results based on the
-   *   URL, or have them precompiled into a binary to avoid any
-   *   lookup. Therefore, binary compatibility needs to be preserved
-   *   on changes to types. (Use versioned type names to manage
-   *   breaking changes.)
-   * Schemes other than `http`, `https` (or the empty scheme) might be
-   * used with implementation specific semantics.
-   * 
- * - * string type_url = 1; - */ - public java.lang.String getTypeUrl() { - java.lang.Object ref = typeUrl_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - typeUrl_ = s; - return s; - } - } - /** - *
-   * A URL/resource name whose content describes the type of the
-   * serialized protocol buffer message.
-   * For URLs which use the scheme `http`, `https`, or no scheme, the
-   * following restrictions and interpretations apply:
-   * * If no scheme is provided, `https` is assumed.
-   * * The last segment of the URL's path must represent the fully
-   *   qualified name of the type (as in `path/google.protobuf.Duration`).
-   *   The name should be in a canonical form (e.g., leading "." is
-   *   not accepted).
-   * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
-   *   value in binary format, or produce an error.
-   * * Applications are allowed to cache lookup results based on the
-   *   URL, or have them precompiled into a binary to avoid any
-   *   lookup. Therefore, binary compatibility needs to be preserved
-   *   on changes to types. (Use versioned type names to manage
-   *   breaking changes.)
-   * Schemes other than `http`, `https` (or the empty scheme) might be
-   * used with implementation specific semantics.
-   * 
- * - * string type_url = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getTypeUrlBytes() { - java.lang.Object ref = typeUrl_; - if (ref instanceof java.lang.String) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - typeUrl_ = b; - return b; - } else { - return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - } - } - - public static final int VALUE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_; - /** - *
-   * Must be a valid serialized protocol buffer of the above specified type.
-   * 
- * - * bytes value = 2; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() { - return value_; - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getTypeUrlBytes().isEmpty()) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, typeUrl_); - } - if (!value_.isEmpty()) { - output.writeBytes(2, value_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getTypeUrlBytes().isEmpty()) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, typeUrl_); - } - if (!value_.isEmpty()) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeBytesSize(2, value_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Any)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.shaded.com.google.protobuf.Any other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Any) obj; - - boolean result = true; - result = result && getTypeUrl() - .equals(other.getTypeUrl()); - result = result && getValue() - .equals(other.getValue()); - return result; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (37 * hash) + TYPE_URL_FIELD_NUMBER; - hash = (53 * hash) + getTypeUrl().hashCode(); - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(byte[] data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom( - byte[] data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any 
parseFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseDelimitedFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.Any prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - *
-   * `Any` contains an arbitrary serialized protocol buffer message along with a
-   * URL that describes the type of the serialized message.
-   * Protobuf library provides support to pack/unpack Any values in the form
-   * of utility functions or additional generated methods of the Any type.
-   * Example 1: Pack and unpack a message in C++.
-   *     Foo foo = ...;
-   *     Any any;
-   *     any.PackFrom(foo);
-   *     ...
-   *     if (any.UnpackTo(&foo)) {
-   *       ...
-   *     }
-   * Example 2: Pack and unpack a message in Java.
-   *     Foo foo = ...;
-   *     Any any = Any.pack(foo);
-   *     ...
-   *     if (any.is(Foo.class)) {
-   *       foo = any.unpack(Foo.class);
-   *     }
-   *  Example 3: Pack and unpack a message in Python.
-   *     foo = Foo(...)
-   *     any = Any()
-   *     any.Pack(foo)
-   *     ...
-   *     if any.Is(Foo.DESCRIPTOR):
-   *       any.Unpack(foo)
-   *       ...
-   * The pack methods provided by protobuf library will by default use
-   * 'type.googleapis.com/full.type.name' as the type URL and the unpack
-   * methods only use the fully qualified type name after the last '/'
-   * in the type URL, for example "foo.bar.com/x/y.z" will yield type
-   * name "y.z".
-   * JSON
-   * ====
-   * The JSON representation of an `Any` value uses the regular
-   * representation of the deserialized, embedded message, with an
-   * additional field `@type` which contains the type URL. Example:
-   *     package google.profile;
-   *     message Person {
-   *       string first_name = 1;
-   *       string last_name = 2;
-   *     }
-   *     {
-   *       "@type": "type.googleapis.com/google.profile.Person",
-   *       "firstName": <string>,
-   *       "lastName": <string>
-   *     }
-   * If the embedded message type is well-known and has a custom JSON
-   * representation, that representation will be embedded adding a field
-   * `value` which holds the custom JSON in addition to the `@type`
-   * field. Example (for message [google.protobuf.Duration][]):
-   *     {
-   *       "@type": "type.googleapis.org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration",
-   *       "value": "1.212s"
-   *     }
-   * 
- * - * Protobuf type {@code google.protobuf.Any} - */ - public static final class Builder extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:google.protobuf.Any) - org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyOrBuilder { - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyProto.internal_static_google_protobuf_Any_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyProto.internal_static_google_protobuf_Any_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - typeUrl_ = ""; - - value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; - - return this; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyProto.internal_static_google_protobuf_Any_descriptor; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Any getDefaultInstanceForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Any build() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Any result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Any buildPartial() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Any result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Any(this); - result.typeUrl_ = typeUrl_; - result.value_ = value_; - onBuilt(); - return result; - } - - public Builder clone() { - return (Builder) super.clone(); - } - public Builder setField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.setField(field, value); - } - public Builder clearField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { - return (Builder) super.clearField(field); - } - public Builder clearOneof( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return (Builder) super.clearOneof(oneof); - } - public Builder setRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { - return (Builder) super.setRepeatedField(field, index, value); - } - public Builder addRepeatedField( - 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.addRepeatedField(field, value); - } - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Any) { - return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.Any)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Any other) { - if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.getDefaultInstance()) return this; - if (!other.getTypeUrl().isEmpty()) { - typeUrl_ = other.typeUrl_; - onChanged(); - } - if (other.getValue() != org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY) { - setValue(other.getValue()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Any) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object typeUrl_ = ""; - /** - *
-     * A URL/resource name whose content describes the type of the
-     * serialized protocol buffer message.
-     * For URLs which use the scheme `http`, `https`, or no scheme, the
-     * following restrictions and interpretations apply:
-     * * If no scheme is provided, `https` is assumed.
-     * * The last segment of the URL's path must represent the fully
-     *   qualified name of the type (as in `path/google.protobuf.Duration`).
-     *   The name should be in a canonical form (e.g., leading "." is
-     *   not accepted).
-     * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
-     *   value in binary format, or produce an error.
-     * * Applications are allowed to cache lookup results based on the
-     *   URL, or have them precompiled into a binary to avoid any
-     *   lookup. Therefore, binary compatibility needs to be preserved
-     *   on changes to types. (Use versioned type names to manage
-     *   breaking changes.)
-     * Schemes other than `http`, `https` (or the empty scheme) might be
-     * used with implementation specific semantics.
-     * 
- * - * string type_url = 1; - */ - public java.lang.String getTypeUrl() { - java.lang.Object ref = typeUrl_; - if (!(ref instanceof java.lang.String)) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - typeUrl_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - *
-     * A URL/resource name whose content describes the type of the
-     * serialized protocol buffer message.
-     * For URLs which use the scheme `http`, `https`, or no scheme, the
-     * following restrictions and interpretations apply:
-     * * If no scheme is provided, `https` is assumed.
-     * * The last segment of the URL's path must represent the fully
-     *   qualified name of the type (as in `path/google.protobuf.Duration`).
-     *   The name should be in a canonical form (e.g., leading "." is
-     *   not accepted).
-     * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
-     *   value in binary format, or produce an error.
-     * * Applications are allowed to cache lookup results based on the
-     *   URL, or have them precompiled into a binary to avoid any
-     *   lookup. Therefore, binary compatibility needs to be preserved
-     *   on changes to types. (Use versioned type names to manage
-     *   breaking changes.)
-     * Schemes other than `http`, `https` (or the empty scheme) might be
-     * used with implementation specific semantics.
-     * 
- * - * string type_url = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getTypeUrlBytes() { - java.lang.Object ref = typeUrl_; - if (ref instanceof String) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - typeUrl_ = b; - return b; - } else { - return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - } - } - /** - *
-     * A URL/resource name whose content describes the type of the
-     * serialized protocol buffer message.
-     * For URLs which use the scheme `http`, `https`, or no scheme, the
-     * following restrictions and interpretations apply:
-     * * If no scheme is provided, `https` is assumed.
-     * * The last segment of the URL's path must represent the fully
-     *   qualified name of the type (as in `path/google.protobuf.Duration`).
-     *   The name should be in a canonical form (e.g., leading "." is
-     *   not accepted).
-     * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
-     *   value in binary format, or produce an error.
-     * * Applications are allowed to cache lookup results based on the
-     *   URL, or have them precompiled into a binary to avoid any
-     *   lookup. Therefore, binary compatibility needs to be preserved
-     *   on changes to types. (Use versioned type names to manage
-     *   breaking changes.)
-     * Schemes other than `http`, `https` (or the empty scheme) might be
-     * used with implementation specific semantics.
-     * 
- * - * string type_url = 1; - */ - public Builder setTypeUrl( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - typeUrl_ = value; - onChanged(); - return this; - } - /** - *
-     * A URL/resource name whose content describes the type of the
-     * serialized protocol buffer message.
-     * For URLs which use the scheme `http`, `https`, or no scheme, the
-     * following restrictions and interpretations apply:
-     * * If no scheme is provided, `https` is assumed.
-     * * The last segment of the URL's path must represent the fully
-     *   qualified name of the type (as in `path/google.protobuf.Duration`).
-     *   The name should be in a canonical form (e.g., leading "." is
-     *   not accepted).
-     * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
-     *   value in binary format, or produce an error.
-     * * Applications are allowed to cache lookup results based on the
-     *   URL, or have them precompiled into a binary to avoid any
-     *   lookup. Therefore, binary compatibility needs to be preserved
-     *   on changes to types. (Use versioned type names to manage
-     *   breaking changes.)
-     * Schemes other than `http`, `https` (or the empty scheme) might be
-     * used with implementation specific semantics.
-     * 
- * - * string type_url = 1; - */ - public Builder clearTypeUrl() { - - typeUrl_ = getDefaultInstance().getTypeUrl(); - onChanged(); - return this; - } - /** - *
-     * A URL/resource name whose content describes the type of the
-     * serialized protocol buffer message.
-     * For URLs which use the scheme `http`, `https`, or no scheme, the
-     * following restrictions and interpretations apply:
-     * * If no scheme is provided, `https` is assumed.
-     * * The last segment of the URL's path must represent the fully
-     *   qualified name of the type (as in `path/google.protobuf.Duration`).
-     *   The name should be in a canonical form (e.g., leading "." is
-     *   not accepted).
-     * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
-     *   value in binary format, or produce an error.
-     * * Applications are allowed to cache lookup results based on the
-     *   URL, or have them precompiled into a binary to avoid any
-     *   lookup. Therefore, binary compatibility needs to be preserved
-     *   on changes to types. (Use versioned type names to manage
-     *   breaking changes.)
-     * Schemes other than `http`, `https` (or the empty scheme) might be
-     * used with implementation specific semantics.
-     * 
- * - * string type_url = 1; - */ - public Builder setTypeUrlBytes( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - typeUrl_ = value; - onChanged(); - return this; - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; - /** - *
-     * Must be a valid serialized protocol buffer of the above specified type.
-     * 
- * - * bytes value = 2; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() { - return value_; - } - /** - *
-     * Must be a valid serialized protocol buffer of the above specified type.
-     * 
- * - * bytes value = 2; - */ - public Builder setValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - - value_ = value; - onChanged(); - return this; - } - /** - *
-     * Must be a valid serialized protocol buffer of the above specified type.
-     * 
- * - * bytes value = 2; - */ - public Builder clearValue() { - - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - public final Builder setUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.protobuf.Any) - } - - // @@protoc_insertion_point(class_scope:google.protobuf.Any) - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Any DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Any(); - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser - PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { - public Any parsePartialFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return new Any(input, extensionRegistry); - } - }; - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Any getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AnyOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AnyOrBuilder.java deleted file mode 100644 index 80981c8ae5e..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AnyOrBuilder.java +++ /dev/null @@ -1,70 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/any.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -public interface AnyOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.protobuf.Any) - org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { - - /** - *
-   * A URL/resource name whose content describes the type of the
-   * serialized protocol buffer message.
-   * For URLs which use the scheme `http`, `https`, or no scheme, the
-   * following restrictions and interpretations apply:
-   * * If no scheme is provided, `https` is assumed.
-   * * The last segment of the URL's path must represent the fully
-   *   qualified name of the type (as in `path/google.protobuf.Duration`).
-   *   The name should be in a canonical form (e.g., leading "." is
-   *   not accepted).
-   * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
-   *   value in binary format, or produce an error.
-   * * Applications are allowed to cache lookup results based on the
-   *   URL, or have them precompiled into a binary to avoid any
-   *   lookup. Therefore, binary compatibility needs to be preserved
-   *   on changes to types. (Use versioned type names to manage
-   *   breaking changes.)
-   * Schemes other than `http`, `https` (or the empty scheme) might be
-   * used with implementation specific semantics.
-   * 
- * - * string type_url = 1; - */ - java.lang.String getTypeUrl(); - /** - *
-   * A URL/resource name whose content describes the type of the
-   * serialized protocol buffer message.
-   * For URLs which use the scheme `http`, `https`, or no scheme, the
-   * following restrictions and interpretations apply:
-   * * If no scheme is provided, `https` is assumed.
-   * * The last segment of the URL's path must represent the fully
-   *   qualified name of the type (as in `path/google.protobuf.Duration`).
-   *   The name should be in a canonical form (e.g., leading "." is
-   *   not accepted).
-   * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
-   *   value in binary format, or produce an error.
-   * * Applications are allowed to cache lookup results based on the
-   *   URL, or have them precompiled into a binary to avoid any
-   *   lookup. Therefore, binary compatibility needs to be preserved
-   *   on changes to types. (Use versioned type names to manage
-   *   breaking changes.)
-   * Schemes other than `http`, `https` (or the empty scheme) might be
-   * used with implementation specific semantics.
-   * 
- * - * string type_url = 1; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getTypeUrlBytes(); - - /** - *
-   * Must be a valid serialized protocol buffer of the above specified type.
-   * 
- * - * bytes value = 2; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue(); -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AnyProto.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AnyProto.java deleted file mode 100644 index cc674c555d6..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AnyProto.java +++ /dev/null @@ -1,59 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/any.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -public final class AnyProto { - private AnyProto() {} - public static void registerAllExtensions( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) { - } - - public static void registerAllExtensions( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) { - registerAllExtensions( - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry); - } - static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - internal_static_google_protobuf_Any_descriptor; - static final - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_google_protobuf_Any_fieldAccessorTable; - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\031google/protobuf/any.proto\022\017google.prot" + - "obuf\"&\n\003Any\022\020\n\010type_url\030\001 \001(\t\022\r\n\005value\030\002" + - " \001(\014Bo\n\023com.google.protobufB\010AnyProtoP\001Z" + - "%github.com/golang/protobuf/ptypes/any\242\002" + - "\003GPB\252\002\036Google.Protobuf.WellKnownTypesb\006p" + - "roto3" - }; - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - return null; - } - }; - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - internal_static_google_protobuf_Any_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_google_protobuf_Any_fieldAccessorTable = new - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_google_protobuf_Any_descriptor, - new java.lang.String[] { "TypeUrl", "Value", }); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Api.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Api.java deleted file mode 100644 index 3265158e22a..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Api.java +++ /dev/null @@ -1,2473 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/api.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -/** - *
- * Api is a light-weight descriptor for a protocol buffer service.
- * 
- * - * Protobuf type {@code google.protobuf.Api} - */ -public final class Api extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:google.protobuf.Api) - ApiOrBuilder { - // Use Api.newBuilder() to construct. - private Api(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private Api() { - name_ = ""; - methods_ = java.util.Collections.emptyList(); - options_ = java.util.Collections.emptyList(); - version_ = ""; - mixins_ = java.util.Collections.emptyList(); - syntax_ = 0; - } - - @java.lang.Override - public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private Api( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - java.lang.String s = input.readStringRequireUtf8(); - - name_ = s; - break; - } - case 18: { - if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { - methods_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000002; - } - methods_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.Method.parser(), extensionRegistry)); - break; - } - case 26: { - if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - options_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000004; - } - options_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.parser(), extensionRegistry)); - break; - } - case 34: { - java.lang.String s = input.readStringRequireUtf8(); - - version_ = s; - break; - } - case 42: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder subBuilder = null; - if (sourceContext_ != null) { - subBuilder = sourceContext_.toBuilder(); - } - sourceContext_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(sourceContext_); - sourceContext_ = subBuilder.buildPartial(); - } - - break; - } - case 50: { - if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { - mixins_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000020; - } - mixins_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin.parser(), extensionRegistry)); - break; - } - case 56: { - int rawValue = input.readEnum(); - - syntax_ = rawValue; - break; - } - } - } - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { - methods_ = java.util.Collections.unmodifiableList(methods_); - } - if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - options_ = 
java.util.Collections.unmodifiableList(options_); - } - if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { - mixins_ = java.util.Collections.unmodifiableList(mixins_); - } - makeExtensionsImmutable(); - } - } - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.ApiProto.internal_static_google_protobuf_Api_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.ApiProto.internal_static_google_protobuf_Api_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Api.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Api.Builder.class); - } - - private int bitField0_; - public static final int NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object name_; - /** - *
-   * The fully qualified name of this api, including package name
-   * followed by the api's simple name.
-   * 
- * - * string name = 1; - */ - public java.lang.String getName() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - name_ = s; - return s; - } - } - /** - *
-   * The fully qualified name of this api, including package name
-   * followed by the api's simple name.
-   * 
- * - * string name = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - name_ = b; - return b; - } else { - return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - } - } - - public static final int METHODS_FIELD_NUMBER = 2; - private java.util.List methods_; - /** - *
-   * The methods of this api, in unspecified order.
-   * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public java.util.List getMethodsList() { - return methods_; - } - /** - *
-   * The methods of this api, in unspecified order.
-   * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public java.util.List - getMethodsOrBuilderList() { - return methods_; - } - /** - *
-   * The methods of this api, in unspecified order.
-   * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public int getMethodsCount() { - return methods_.size(); - } - /** - *
-   * The methods of this api, in unspecified order.
-   * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Method getMethods(int index) { - return methods_.get(index); - } - /** - *
-   * The methods of this api, in unspecified order.
-   * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.MethodOrBuilder getMethodsOrBuilder( - int index) { - return methods_.get(index); - } - - public static final int OPTIONS_FIELD_NUMBER = 3; - private java.util.List options_; - /** - *
-   * Any metadata attached to the API.
-   * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public java.util.List getOptionsList() { - return options_; - } - /** - *
-   * Any metadata attached to the API.
-   * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public java.util.List - getOptionsOrBuilderList() { - return options_; - } - /** - *
-   * Any metadata attached to the API.
-   * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public int getOptionsCount() { - return options_.size(); - } - /** - *
-   * Any metadata attached to the API.
-   * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Option getOptions(int index) { - return options_.get(index); - } - /** - *
-   * Any metadata attached to the API.
-   * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder getOptionsOrBuilder( - int index) { - return options_.get(index); - } - - public static final int VERSION_FIELD_NUMBER = 4; - private volatile java.lang.Object version_; - /** - *
-   * A version string for this api. If specified, must have the form
-   * `major-version.minor-version`, as in `1.10`. If the minor version
-   * is omitted, it defaults to zero. If the entire version field is
-   * empty, the major version is derived from the package name, as
-   * outlined below. If the field is not empty, the version in the
-   * package name will be verified to be consistent with what is
-   * provided here.
-   * The versioning schema uses [semantic
-   * versioning](http://semver.org) where the major version number
-   * indicates a breaking change and the minor version an additive,
-   * non-breaking change. Both version numbers are signals to users
-   * what to expect from different versions, and should be carefully
-   * chosen based on the product plan.
-   * The major version is also reflected in the package name of the
-   * API, which must end in `v<major-version>`, as in
-   * `google.feature.v1`. For major versions 0 and 1, the suffix can
-   * be omitted. Zero major versions must only be used for
-   * experimental, none-GA apis.
-   * 
- * - * string version = 4; - */ - public java.lang.String getVersion() { - java.lang.Object ref = version_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - version_ = s; - return s; - } - } - /** - *
-   * A version string for this api. If specified, must have the form
-   * `major-version.minor-version`, as in `1.10`. If the minor version
-   * is omitted, it defaults to zero. If the entire version field is
-   * empty, the major version is derived from the package name, as
-   * outlined below. If the field is not empty, the version in the
-   * package name will be verified to be consistent with what is
-   * provided here.
-   * The versioning schema uses [semantic
-   * versioning](http://semver.org) where the major version number
-   * indicates a breaking change and the minor version an additive,
-   * non-breaking change. Both version numbers are signals to users
-   * what to expect from different versions, and should be carefully
-   * chosen based on the product plan.
-   * The major version is also reflected in the package name of the
-   * API, which must end in `v<major-version>`, as in
-   * `google.feature.v1`. For major versions 0 and 1, the suffix can
-   * be omitted. Zero major versions must only be used for
-   * experimental, none-GA apis.
-   * 
- * - * string version = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getVersionBytes() { - java.lang.Object ref = version_; - if (ref instanceof java.lang.String) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - version_ = b; - return b; - } else { - return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - } - } - - public static final int SOURCE_CONTEXT_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext sourceContext_; - /** - *
-   * Source context for the protocol buffer service represented by this
-   * message.
-   * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - public boolean hasSourceContext() { - return sourceContext_ != null; - } - /** - *
-   * Source context for the protocol buffer service represented by this
-   * message.
-   * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext getSourceContext() { - return sourceContext_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.getDefaultInstance() : sourceContext_; - } - /** - *
-   * Source context for the protocol buffer service represented by this
-   * message.
-   * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder getSourceContextOrBuilder() { - return getSourceContext(); - } - - public static final int MIXINS_FIELD_NUMBER = 6; - private java.util.List mixins_; - /** - *
-   * Included APIs. See [Mixin][].
-   * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public java.util.List getMixinsList() { - return mixins_; - } - /** - *
-   * Included APIs. See [Mixin][].
-   * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public java.util.List - getMixinsOrBuilderList() { - return mixins_; - } - /** - *
-   * Included APIs. See [Mixin][].
-   * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public int getMixinsCount() { - return mixins_.size(); - } - /** - *
-   * Included APIs. See [Mixin][].
-   * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin getMixins(int index) { - return mixins_.get(index); - } - /** - *
-   * Included APIs. See [Mixin][].
-   * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.MixinOrBuilder getMixinsOrBuilder( - int index) { - return mixins_.get(index); - } - - public static final int SYNTAX_FIELD_NUMBER = 7; - private int syntax_; - /** - *
-   * The source syntax of the service.
-   * 
- * - * .google.protobuf.Syntax syntax = 7; - */ - public int getSyntaxValue() { - return syntax_; - } - /** - *
-   * The source syntax of the service.
-   * 
- * - * .google.protobuf.Syntax syntax = 7; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax getSyntax() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax result = org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax.valueOf(syntax_); - return result == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax.UNRECOGNIZED : result; - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getNameBytes().isEmpty()) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); - } - for (int i = 0; i < methods_.size(); i++) { - output.writeMessage(2, methods_.get(i)); - } - for (int i = 0; i < options_.size(); i++) { - output.writeMessage(3, options_.get(i)); - } - if (!getVersionBytes().isEmpty()) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 4, version_); - } - if (sourceContext_ != null) { - output.writeMessage(5, getSourceContext()); - } - for (int i = 0; i < mixins_.size(); i++) { - output.writeMessage(6, mixins_.get(i)); - } - if (syntax_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax.SYNTAX_PROTO2.getNumber()) { - output.writeEnum(7, syntax_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getNameBytes().isEmpty()) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); - } - for (int i = 0; i < methods_.size(); i++) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeMessageSize(2, methods_.get(i)); - } - for (int i = 0; i < options_.size(); i++) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeMessageSize(3, options_.get(i)); - } - if (!getVersionBytes().isEmpty()) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(4, version_); - } - if (sourceContext_ != null) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeMessageSize(5, getSourceContext()); - } - for (int i = 0; i < mixins_.size(); i++) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeMessageSize(6, mixins_.get(i)); - } - if (syntax_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax.SYNTAX_PROTO2.getNumber()) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeEnumSize(7, syntax_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Api)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.shaded.com.google.protobuf.Api other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Api) obj; - - boolean result = true; - result = result && getName() - .equals(other.getName()); - result = result && getMethodsList() - .equals(other.getMethodsList()); - result = result && getOptionsList() - 
.equals(other.getOptionsList()); - result = result && getVersion() - .equals(other.getVersion()); - result = result && (hasSourceContext() == other.hasSourceContext()); - if (hasSourceContext()) { - result = result && getSourceContext() - .equals(other.getSourceContext()); - } - result = result && getMixinsList() - .equals(other.getMixinsList()); - result = result && syntax_ == other.syntax_; - return result; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - if (getMethodsCount() > 0) { - hash = (37 * hash) + METHODS_FIELD_NUMBER; - hash = (53 * hash) + getMethodsList().hashCode(); - } - if (getOptionsCount() > 0) { - hash = (37 * hash) + OPTIONS_FIELD_NUMBER; - hash = (53 * hash) + getOptionsList().hashCode(); - } - hash = (37 * hash) + VERSION_FIELD_NUMBER; - hash = (53 * hash) + getVersion().hashCode(); - if (hasSourceContext()) { - hash = (37 * hash) + SOURCE_CONTEXT_FIELD_NUMBER; - hash = (53 * hash) + getSourceContext().hashCode(); - } - if (getMixinsCount() > 0) { - hash = (37 * hash) + MIXINS_FIELD_NUMBER; - hash = (53 * hash) + getMixinsList().hashCode(); - } - hash = (37 * hash) + SYNTAX_FIELD_NUMBER; - hash = (53 * hash) + syntax_; - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parseFrom(byte[] data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parseFrom( - byte[] data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parseFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parseFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parseDelimitedFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.Api prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - *
-   * Api is a light-weight descriptor for a protocol buffer service.
-   * 
- * - * Protobuf type {@code google.protobuf.Api} - */ - public static final class Builder extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:google.protobuf.Api) - org.apache.hadoop.hbase.shaded.com.google.protobuf.ApiOrBuilder { - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.ApiProto.internal_static_google_protobuf_Api_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.ApiProto.internal_static_google_protobuf_Api_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Api.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Api.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.Api.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - getMethodsFieldBuilder(); - getOptionsFieldBuilder(); - getMixinsFieldBuilder(); - } - } - public Builder clear() { - super.clear(); - name_ = ""; - - if (methodsBuilder_ == null) { - methods_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - methodsBuilder_.clear(); - } - if (optionsBuilder_ == null) { - options_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - } else { - optionsBuilder_.clear(); - } - version_ = ""; - - if (sourceContextBuilder_ == null) { - sourceContext_ = null; - } else { - sourceContext_ = null; - sourceContextBuilder_ = null; - } - if (mixinsBuilder_ == null) { - mixins_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000020); - } else { - mixinsBuilder_.clear(); - } - syntax_ = 0; - - return this; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.ApiProto.internal_static_google_protobuf_Api_descriptor; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Api getDefaultInstanceForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.Api.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Api build() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Api result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Api buildPartial() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Api result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Api(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - result.name_ = name_; - if (methodsBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - methods_ = java.util.Collections.unmodifiableList(methods_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.methods_ = methods_; - } else { - 
result.methods_ = methodsBuilder_.build(); - } - if (optionsBuilder_ == null) { - if (((bitField0_ & 0x00000004) == 0x00000004)) { - options_ = java.util.Collections.unmodifiableList(options_); - bitField0_ = (bitField0_ & ~0x00000004); - } - result.options_ = options_; - } else { - result.options_ = optionsBuilder_.build(); - } - result.version_ = version_; - if (sourceContextBuilder_ == null) { - result.sourceContext_ = sourceContext_; - } else { - result.sourceContext_ = sourceContextBuilder_.build(); - } - if (mixinsBuilder_ == null) { - if (((bitField0_ & 0x00000020) == 0x00000020)) { - mixins_ = java.util.Collections.unmodifiableList(mixins_); - bitField0_ = (bitField0_ & ~0x00000020); - } - result.mixins_ = mixins_; - } else { - result.mixins_ = mixinsBuilder_.build(); - } - result.syntax_ = syntax_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder clone() { - return (Builder) super.clone(); - } - public Builder setField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.setField(field, value); - } - public Builder clearField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { - return (Builder) super.clearField(field); - } - public Builder clearOneof( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return (Builder) super.clearOneof(oneof); - } - public Builder setRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { - return (Builder) super.setRepeatedField(field, index, value); - } - public Builder addRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.addRepeatedField(field, value); - } - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Api) { - return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.Api)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Api other) { - if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.Api.getDefaultInstance()) return this; - if (!other.getName().isEmpty()) { - name_ = other.name_; - onChanged(); - } - if (methodsBuilder_ == null) { - if (!other.methods_.isEmpty()) { - if (methods_.isEmpty()) { - methods_ = other.methods_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureMethodsIsMutable(); - methods_.addAll(other.methods_); - } - onChanged(); - } - } else { - if (!other.methods_.isEmpty()) { - if (methodsBuilder_.isEmpty()) { - methodsBuilder_.dispose(); - methodsBuilder_ = null; - methods_ = other.methods_; - bitField0_ = (bitField0_ & ~0x00000002); - methodsBuilder_ = - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
- getMethodsFieldBuilder() : null; - } else { - methodsBuilder_.addAllMessages(other.methods_); - } - } - } - if (optionsBuilder_ == null) { - if (!other.options_.isEmpty()) { - if (options_.isEmpty()) { - options_ = other.options_; - bitField0_ = (bitField0_ & ~0x00000004); - } else { - ensureOptionsIsMutable(); - options_.addAll(other.options_); - } - onChanged(); - } - } else { - if (!other.options_.isEmpty()) { - if (optionsBuilder_.isEmpty()) { - optionsBuilder_.dispose(); - optionsBuilder_ = null; - options_ = other.options_; - bitField0_ = (bitField0_ & ~0x00000004); - optionsBuilder_ = - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? - getOptionsFieldBuilder() : null; - } else { - optionsBuilder_.addAllMessages(other.options_); - } - } - } - if (!other.getVersion().isEmpty()) { - version_ = other.version_; - onChanged(); - } - if (other.hasSourceContext()) { - mergeSourceContext(other.getSourceContext()); - } - if (mixinsBuilder_ == null) { - if (!other.mixins_.isEmpty()) { - if (mixins_.isEmpty()) { - mixins_ = other.mixins_; - bitField0_ = (bitField0_ & ~0x00000020); - } else { - ensureMixinsIsMutable(); - mixins_.addAll(other.mixins_); - } - onChanged(); - } - } else { - if (!other.mixins_.isEmpty()) { - if (mixinsBuilder_.isEmpty()) { - mixinsBuilder_.dispose(); - mixinsBuilder_ = null; - mixins_ = other.mixins_; - bitField0_ = (bitField0_ & ~0x00000020); - mixinsBuilder_ = - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? - getMixinsFieldBuilder() : null; - } else { - mixinsBuilder_.addAllMessages(other.mixins_); - } - } - } - if (other.syntax_ != 0) { - setSyntaxValue(other.getSyntaxValue()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Api) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private java.lang.Object name_ = ""; - /** - *
-     * The fully qualified name of this api, including package name
-     * followed by the api's simple name.
-     * 
- * - * string name = 1; - */ - public java.lang.String getName() { - java.lang.Object ref = name_; - if (!(ref instanceof java.lang.String)) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - name_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - *
-     * The fully qualified name of this api, including package name
-     * followed by the api's simple name.
-     * 
- * - * string name = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - name_ = b; - return b; - } else { - return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - } - } - /** - *
-     * The fully qualified name of this api, including package name
-     * followed by the api's simple name.
-     * 
- * - * string name = 1; - */ - public Builder setName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - name_ = value; - onChanged(); - return this; - } - /** - *
-     * The fully qualified name of this api, including package name
-     * followed by the api's simple name.
-     * 
- * - * string name = 1; - */ - public Builder clearName() { - - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - /** - *
-     * The fully qualified name of this api, including package name
-     * followed by the api's simple name.
-     * 
- * - * string name = 1; - */ - public Builder setNameBytes( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - name_ = value; - onChanged(); - return this; - } - - private java.util.List methods_ = - java.util.Collections.emptyList(); - private void ensureMethodsIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - methods_ = new java.util.ArrayList(methods_); - bitField0_ |= 0x00000002; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.Method, org.apache.hadoop.hbase.shaded.com.google.protobuf.Method.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.MethodOrBuilder> methodsBuilder_; - - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public java.util.List getMethodsList() { - if (methodsBuilder_ == null) { - return java.util.Collections.unmodifiableList(methods_); - } else { - return methodsBuilder_.getMessageList(); - } - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public int getMethodsCount() { - if (methodsBuilder_ == null) { - return methods_.size(); - } else { - return methodsBuilder_.getCount(); - } - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Method getMethods(int index) { - if (methodsBuilder_ == null) { - return methods_.get(index); - } else { - return methodsBuilder_.getMessage(index); - } - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public Builder setMethods( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Method value) { - if (methodsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMethodsIsMutable(); - methods_.set(index, value); - onChanged(); - } else { - methodsBuilder_.setMessage(index, value); - } - return this; - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public Builder setMethods( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Method.Builder builderForValue) { - if (methodsBuilder_ == null) { - ensureMethodsIsMutable(); - methods_.set(index, builderForValue.build()); - onChanged(); - } else { - methodsBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public Builder addMethods(org.apache.hadoop.hbase.shaded.com.google.protobuf.Method value) { - if (methodsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMethodsIsMutable(); - methods_.add(value); - onChanged(); - } else { - methodsBuilder_.addMessage(value); - } - return this; - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public Builder addMethods( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Method value) { - if (methodsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMethodsIsMutable(); - methods_.add(index, value); - onChanged(); - } else { - methodsBuilder_.addMessage(index, value); - } - return this; - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public Builder addMethods( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Method.Builder builderForValue) { - if (methodsBuilder_ == null) { - ensureMethodsIsMutable(); - methods_.add(builderForValue.build()); - onChanged(); - } else { - methodsBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public Builder addMethods( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Method.Builder builderForValue) { - if (methodsBuilder_ == null) { - ensureMethodsIsMutable(); - methods_.add(index, builderForValue.build()); - onChanged(); - } else { - methodsBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public Builder addAllMethods( - java.lang.Iterable values) { - if (methodsBuilder_ == null) { - ensureMethodsIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, methods_); - onChanged(); - } else { - methodsBuilder_.addAllMessages(values); - } - return this; - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public Builder clearMethods() { - if (methodsBuilder_ == null) { - methods_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - methodsBuilder_.clear(); - } - return this; - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public Builder removeMethods(int index) { - if (methodsBuilder_ == null) { - ensureMethodsIsMutable(); - methods_.remove(index); - onChanged(); - } else { - methodsBuilder_.remove(index); - } - return this; - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Method.Builder getMethodsBuilder( - int index) { - return getMethodsFieldBuilder().getBuilder(index); - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.MethodOrBuilder getMethodsOrBuilder( - int index) { - if (methodsBuilder_ == null) { - return methods_.get(index); } else { - return methodsBuilder_.getMessageOrBuilder(index); - } - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public java.util.List - getMethodsOrBuilderList() { - if (methodsBuilder_ != null) { - return methodsBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(methods_); - } - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Method.Builder addMethodsBuilder() { - return getMethodsFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Method.getDefaultInstance()); - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Method.Builder addMethodsBuilder( - int index) { - return getMethodsFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Method.getDefaultInstance()); - } - /** - *
-     * The methods of this api, in unspecified order.
-     * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - public java.util.List - getMethodsBuilderList() { - return getMethodsFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.Method, org.apache.hadoop.hbase.shaded.com.google.protobuf.Method.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.MethodOrBuilder> - getMethodsFieldBuilder() { - if (methodsBuilder_ == null) { - methodsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.Method, org.apache.hadoop.hbase.shaded.com.google.protobuf.Method.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.MethodOrBuilder>( - methods_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - methods_ = null; - } - return methodsBuilder_; - } - - private java.util.List options_ = - java.util.Collections.emptyList(); - private void ensureOptionsIsMutable() { - if (!((bitField0_ & 0x00000004) == 0x00000004)) { - options_ = new java.util.ArrayList(options_); - bitField0_ |= 0x00000004; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.Option, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder> optionsBuilder_; - - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public java.util.List getOptionsList() { - if (optionsBuilder_ == null) { - return java.util.Collections.unmodifiableList(options_); - } else { - return optionsBuilder_.getMessageList(); - } - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public int getOptionsCount() { - if (optionsBuilder_ == null) { - return options_.size(); - } else { - return optionsBuilder_.getCount(); - } - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Option getOptions(int index) { - if (optionsBuilder_ == null) { - return options_.get(index); - } else { - return optionsBuilder_.getMessage(index); - } - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public Builder setOptions( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option value) { - if (optionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureOptionsIsMutable(); - options_.set(index, value); - onChanged(); - } else { - optionsBuilder_.setMessage(index, value); - } - return this; - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public Builder setOptions( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder builderForValue) { - if (optionsBuilder_ == null) { - ensureOptionsIsMutable(); - options_.set(index, builderForValue.build()); - onChanged(); - } else { - optionsBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public Builder addOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.Option value) { - if (optionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureOptionsIsMutable(); - options_.add(value); - onChanged(); - } else { - optionsBuilder_.addMessage(value); - } - return this; - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public Builder addOptions( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option value) { - if (optionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureOptionsIsMutable(); - options_.add(index, value); - onChanged(); - } else { - optionsBuilder_.addMessage(index, value); - } - return this; - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public Builder addOptions( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder builderForValue) { - if (optionsBuilder_ == null) { - ensureOptionsIsMutable(); - options_.add(builderForValue.build()); - onChanged(); - } else { - optionsBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public Builder addOptions( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder builderForValue) { - if (optionsBuilder_ == null) { - ensureOptionsIsMutable(); - options_.add(index, builderForValue.build()); - onChanged(); - } else { - optionsBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public Builder addAllOptions( - java.lang.Iterable values) { - if (optionsBuilder_ == null) { - ensureOptionsIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, options_); - onChanged(); - } else { - optionsBuilder_.addAllMessages(values); - } - return this; - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public Builder clearOptions() { - if (optionsBuilder_ == null) { - options_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - onChanged(); - } else { - optionsBuilder_.clear(); - } - return this; - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public Builder removeOptions(int index) { - if (optionsBuilder_ == null) { - ensureOptionsIsMutable(); - options_.remove(index); - onChanged(); - } else { - optionsBuilder_.remove(index); - } - return this; - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder getOptionsBuilder( - int index) { - return getOptionsFieldBuilder().getBuilder(index); - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder getOptionsOrBuilder( - int index) { - if (optionsBuilder_ == null) { - return options_.get(index); } else { - return optionsBuilder_.getMessageOrBuilder(index); - } - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public java.util.List - getOptionsOrBuilderList() { - if (optionsBuilder_ != null) { - return optionsBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(options_); - } - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder addOptionsBuilder() { - return getOptionsFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.getDefaultInstance()); - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder addOptionsBuilder( - int index) { - return getOptionsFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.getDefaultInstance()); - } - /** - *
-     * Any metadata attached to the API.
-     * 
- * - * repeated .google.protobuf.Option options = 3; - */ - public java.util.List - getOptionsBuilderList() { - return getOptionsFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.Option, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder> - getOptionsFieldBuilder() { - if (optionsBuilder_ == null) { - optionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.Option, org.apache.hadoop.hbase.shaded.com.google.protobuf.Option.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder>( - options_, - ((bitField0_ & 0x00000004) == 0x00000004), - getParentForChildren(), - isClean()); - options_ = null; - } - return optionsBuilder_; - } - - private java.lang.Object version_ = ""; - /** - *
-     * A version string for this api. If specified, must have the form
-     * `major-version.minor-version`, as in `1.10`. If the minor version
-     * is omitted, it defaults to zero. If the entire version field is
-     * empty, the major version is derived from the package name, as
-     * outlined below. If the field is not empty, the version in the
-     * package name will be verified to be consistent with what is
-     * provided here.
-     * The versioning schema uses [semantic
-     * versioning](http://semver.org) where the major version number
-     * indicates a breaking change and the minor version an additive,
-     * non-breaking change. Both version numbers are signals to users
-     * what to expect from different versions, and should be carefully
-     * chosen based on the product plan.
-     * The major version is also reflected in the package name of the
-     * API, which must end in `v<major-version>`, as in
-     * `google.feature.v1`. For major versions 0 and 1, the suffix can
-     * be omitted. Zero major versions must only be used for
-     * experimental, none-GA apis.
-     * 
- * - * string version = 4; - */ - public java.lang.String getVersion() { - java.lang.Object ref = version_; - if (!(ref instanceof java.lang.String)) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - version_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - *
-     * A version string for this api. If specified, must have the form
-     * `major-version.minor-version`, as in `1.10`. If the minor version
-     * is omitted, it defaults to zero. If the entire version field is
-     * empty, the major version is derived from the package name, as
-     * outlined below. If the field is not empty, the version in the
-     * package name will be verified to be consistent with what is
-     * provided here.
-     * The versioning schema uses [semantic
-     * versioning](http://semver.org) where the major version number
-     * indicates a breaking change and the minor version an additive,
-     * non-breaking change. Both version numbers are signals to users
-     * what to expect from different versions, and should be carefully
-     * chosen based on the product plan.
-     * The major version is also reflected in the package name of the
-     * API, which must end in `v<major-version>`, as in
-     * `google.feature.v1`. For major versions 0 and 1, the suffix can
-     * be omitted. Zero major versions must only be used for
-     * experimental, none-GA apis.
-     * 
- * - * string version = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getVersionBytes() { - java.lang.Object ref = version_; - if (ref instanceof String) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - version_ = b; - return b; - } else { - return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - } - } - /** - *
-     * A version string for this api. If specified, must have the form
-     * `major-version.minor-version`, as in `1.10`. If the minor version
-     * is omitted, it defaults to zero. If the entire version field is
-     * empty, the major version is derived from the package name, as
-     * outlined below. If the field is not empty, the version in the
-     * package name will be verified to be consistent with what is
-     * provided here.
-     * The versioning schema uses [semantic
-     * versioning](http://semver.org) where the major version number
-     * indicates a breaking change and the minor version an additive,
-     * non-breaking change. Both version numbers are signals to users
-     * what to expect from different versions, and should be carefully
-     * chosen based on the product plan.
-     * The major version is also reflected in the package name of the
-     * API, which must end in `v<major-version>`, as in
-     * `google.feature.v1`. For major versions 0 and 1, the suffix can
-     * be omitted. Zero major versions must only be used for
-     * experimental, none-GA apis.
-     * 
- * - * string version = 4; - */ - public Builder setVersion( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - version_ = value; - onChanged(); - return this; - } - /** - *
-     * A version string for this api. If specified, must have the form
-     * `major-version.minor-version`, as in `1.10`. If the minor version
-     * is omitted, it defaults to zero. If the entire version field is
-     * empty, the major version is derived from the package name, as
-     * outlined below. If the field is not empty, the version in the
-     * package name will be verified to be consistent with what is
-     * provided here.
-     * The versioning schema uses [semantic
-     * versioning](http://semver.org) where the major version number
-     * indicates a breaking change and the minor version an additive,
-     * non-breaking change. Both version numbers are signals to users
-     * what to expect from different versions, and should be carefully
-     * chosen based on the product plan.
-     * The major version is also reflected in the package name of the
-     * API, which must end in `v<major-version>`, as in
-     * `google.feature.v1`. For major versions 0 and 1, the suffix can
-     * be omitted. Zero major versions must only be used for
-     * experimental, none-GA apis.
-     * 
- * - * string version = 4; - */ - public Builder clearVersion() { - - version_ = getDefaultInstance().getVersion(); - onChanged(); - return this; - } - /** - *
-     * A version string for this api. If specified, must have the form
-     * `major-version.minor-version`, as in `1.10`. If the minor version
-     * is omitted, it defaults to zero. If the entire version field is
-     * empty, the major version is derived from the package name, as
-     * outlined below. If the field is not empty, the version in the
-     * package name will be verified to be consistent with what is
-     * provided here.
-     * The versioning schema uses [semantic
-     * versioning](http://semver.org) where the major version number
-     * indicates a breaking change and the minor version an additive,
-     * non-breaking change. Both version numbers are signals to users
-     * what to expect from different versions, and should be carefully
-     * chosen based on the product plan.
-     * The major version is also reflected in the package name of the
-     * API, which must end in `v<major-version>`, as in
-     * `google.feature.v1`. For major versions 0 and 1, the suffix can
-     * be omitted. Zero major versions must only be used for
-     * experimental, none-GA apis.
-     * 
- * - * string version = 4; - */ - public Builder setVersionBytes( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - version_ = value; - onChanged(); - return this; - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext sourceContext_ = null; - private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder> sourceContextBuilder_; - /** - *
-     * Source context for the protocol buffer service represented by this
-     * message.
-     * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - public boolean hasSourceContext() { - return sourceContextBuilder_ != null || sourceContext_ != null; - } - /** - *
-     * Source context for the protocol buffer service represented by this
-     * message.
-     * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext getSourceContext() { - if (sourceContextBuilder_ == null) { - return sourceContext_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.getDefaultInstance() : sourceContext_; - } else { - return sourceContextBuilder_.getMessage(); - } - } - /** - *
-     * Source context for the protocol buffer service represented by this
-     * message.
-     * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - public Builder setSourceContext(org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext value) { - if (sourceContextBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - sourceContext_ = value; - onChanged(); - } else { - sourceContextBuilder_.setMessage(value); - } - - return this; - } - /** - *
-     * Source context for the protocol buffer service represented by this
-     * message.
-     * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - public Builder setSourceContext( - org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder builderForValue) { - if (sourceContextBuilder_ == null) { - sourceContext_ = builderForValue.build(); - onChanged(); - } else { - sourceContextBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - *
-     * Source context for the protocol buffer service represented by this
-     * message.
-     * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - public Builder mergeSourceContext(org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext value) { - if (sourceContextBuilder_ == null) { - if (sourceContext_ != null) { - sourceContext_ = - org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.newBuilder(sourceContext_).mergeFrom(value).buildPartial(); - } else { - sourceContext_ = value; - } - onChanged(); - } else { - sourceContextBuilder_.mergeFrom(value); - } - - return this; - } - /** - *
-     * Source context for the protocol buffer service represented by this
-     * message.
-     * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - public Builder clearSourceContext() { - if (sourceContextBuilder_ == null) { - sourceContext_ = null; - onChanged(); - } else { - sourceContext_ = null; - sourceContextBuilder_ = null; - } - - return this; - } - /** - *
-     * Source context for the protocol buffer service represented by this
-     * message.
-     * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder getSourceContextBuilder() { - - onChanged(); - return getSourceContextFieldBuilder().getBuilder(); - } - /** - *
-     * Source context for the protocol buffer service represented by this
-     * message.
-     * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder getSourceContextOrBuilder() { - if (sourceContextBuilder_ != null) { - return sourceContextBuilder_.getMessageOrBuilder(); - } else { - return sourceContext_ == null ? - org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.getDefaultInstance() : sourceContext_; - } - } - /** - *
-     * Source context for the protocol buffer service represented by this
-     * message.
-     * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder> - getSourceContextFieldBuilder() { - if (sourceContextBuilder_ == null) { - sourceContextBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder>( - getSourceContext(), - getParentForChildren(), - isClean()); - sourceContext_ = null; - } - return sourceContextBuilder_; - } - - private java.util.List mixins_ = - java.util.Collections.emptyList(); - private void ensureMixinsIsMutable() { - if (!((bitField0_ & 0x00000020) == 0x00000020)) { - mixins_ = new java.util.ArrayList(mixins_); - bitField0_ |= 0x00000020; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin, org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.MixinOrBuilder> mixinsBuilder_; - - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public java.util.List getMixinsList() { - if (mixinsBuilder_ == null) { - return java.util.Collections.unmodifiableList(mixins_); - } else { - return mixinsBuilder_.getMessageList(); - } - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public int getMixinsCount() { - if (mixinsBuilder_ == null) { - return mixins_.size(); - } else { - return mixinsBuilder_.getCount(); - } - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin getMixins(int index) { - if (mixinsBuilder_ == null) { - return mixins_.get(index); - } else { - return mixinsBuilder_.getMessage(index); - } - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public Builder setMixins( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin value) { - if (mixinsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMixinsIsMutable(); - mixins_.set(index, value); - onChanged(); - } else { - mixinsBuilder_.setMessage(index, value); - } - return this; - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public Builder setMixins( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin.Builder builderForValue) { - if (mixinsBuilder_ == null) { - ensureMixinsIsMutable(); - mixins_.set(index, builderForValue.build()); - onChanged(); - } else { - mixinsBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public Builder addMixins(org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin value) { - if (mixinsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMixinsIsMutable(); - mixins_.add(value); - onChanged(); - } else { - mixinsBuilder_.addMessage(value); - } - return this; - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public Builder addMixins( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin value) { - if (mixinsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMixinsIsMutable(); - mixins_.add(index, value); - onChanged(); - } else { - mixinsBuilder_.addMessage(index, value); - } - return this; - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public Builder addMixins( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin.Builder builderForValue) { - if (mixinsBuilder_ == null) { - ensureMixinsIsMutable(); - mixins_.add(builderForValue.build()); - onChanged(); - } else { - mixinsBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public Builder addMixins( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin.Builder builderForValue) { - if (mixinsBuilder_ == null) { - ensureMixinsIsMutable(); - mixins_.add(index, builderForValue.build()); - onChanged(); - } else { - mixinsBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public Builder addAllMixins( - java.lang.Iterable values) { - if (mixinsBuilder_ == null) { - ensureMixinsIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, mixins_); - onChanged(); - } else { - mixinsBuilder_.addAllMessages(values); - } - return this; - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public Builder clearMixins() { - if (mixinsBuilder_ == null) { - mixins_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000020); - onChanged(); - } else { - mixinsBuilder_.clear(); - } - return this; - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public Builder removeMixins(int index) { - if (mixinsBuilder_ == null) { - ensureMixinsIsMutable(); - mixins_.remove(index); - onChanged(); - } else { - mixinsBuilder_.remove(index); - } - return this; - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin.Builder getMixinsBuilder( - int index) { - return getMixinsFieldBuilder().getBuilder(index); - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.MixinOrBuilder getMixinsOrBuilder( - int index) { - if (mixinsBuilder_ == null) { - return mixins_.get(index); } else { - return mixinsBuilder_.getMessageOrBuilder(index); - } - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public java.util.List - getMixinsOrBuilderList() { - if (mixinsBuilder_ != null) { - return mixinsBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(mixins_); - } - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin.Builder addMixinsBuilder() { - return getMixinsFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin.getDefaultInstance()); - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin.Builder addMixinsBuilder( - int index) { - return getMixinsFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin.getDefaultInstance()); - } - /** - *
-     * Included APIs. See [Mixin][].
-     * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - public java.util.List - getMixinsBuilderList() { - return getMixinsFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin, org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.MixinOrBuilder> - getMixinsFieldBuilder() { - if (mixinsBuilder_ == null) { - mixinsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin, org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.MixinOrBuilder>( - mixins_, - ((bitField0_ & 0x00000020) == 0x00000020), - getParentForChildren(), - isClean()); - mixins_ = null; - } - return mixinsBuilder_; - } - - private int syntax_ = 0; - /** - *
-     * The source syntax of the service.
-     * 
- * - * .google.protobuf.Syntax syntax = 7; - */ - public int getSyntaxValue() { - return syntax_; - } - /** - *
-     * The source syntax of the service.
-     * 
- * - * .google.protobuf.Syntax syntax = 7; - */ - public Builder setSyntaxValue(int value) { - syntax_ = value; - onChanged(); - return this; - } - /** - *
-     * The source syntax of the service.
-     * 
- * - * .google.protobuf.Syntax syntax = 7; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax getSyntax() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax result = org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax.valueOf(syntax_); - return result == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax.UNRECOGNIZED : result; - } - /** - *
-     * The source syntax of the service.
-     * 
- * - * .google.protobuf.Syntax syntax = 7; - */ - public Builder setSyntax(org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax value) { - if (value == null) { - throw new NullPointerException(); - } - - syntax_ = value.getNumber(); - onChanged(); - return this; - } - /** - *
-     * The source syntax of the service.
-     * 
- * - * .google.protobuf.Syntax syntax = 7; - */ - public Builder clearSyntax() { - - syntax_ = 0; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.protobuf.Api) - } - - // @@protoc_insertion_point(class_scope:google.protobuf.Api) - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Api DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Api(); - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser - PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { - public Api parsePartialFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return new Api(input, extensionRegistry); - } - }; - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Api getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ApiOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ApiOrBuilder.java deleted file mode 100644 index b8b7030e1c4..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ApiOrBuilder.java +++ /dev/null @@ -1,258 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/api.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -public interface ApiOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.protobuf.Api) - org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { - - /** - *
-   * The fully qualified name of this api, including package name
-   * followed by the api's simple name.
-   * 
- * - * string name = 1; - */ - java.lang.String getName(); - /** - *
-   * The fully qualified name of this api, including package name
-   * followed by the api's simple name.
-   * 
- * - * string name = 1; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getNameBytes(); - - /** - *
-   * The methods of this api, in unspecified order.
-   * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - java.util.List - getMethodsList(); - /** - *
-   * The methods of this api, in unspecified order.
-   * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.Method getMethods(int index); - /** - *
-   * The methods of this api, in unspecified order.
-   * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - int getMethodsCount(); - /** - *
-   * The methods of this api, in unspecified order.
-   * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - java.util.List - getMethodsOrBuilderList(); - /** - *
-   * The methods of this api, in unspecified order.
-   * 
- * - * repeated .google.protobuf.Method methods = 2; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.MethodOrBuilder getMethodsOrBuilder( - int index); - - /** - *
-   * Any metadata attached to the API.
-   * 
- * - * repeated .google.protobuf.Option options = 3; - */ - java.util.List - getOptionsList(); - /** - *
-   * Any metadata attached to the API.
-   * 
- * - * repeated .google.protobuf.Option options = 3; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.Option getOptions(int index); - /** - *
-   * Any metadata attached to the API.
-   * 
- * - * repeated .google.protobuf.Option options = 3; - */ - int getOptionsCount(); - /** - *
-   * Any metadata attached to the API.
-   * 
- * - * repeated .google.protobuf.Option options = 3; - */ - java.util.List - getOptionsOrBuilderList(); - /** - *
-   * Any metadata attached to the API.
-   * 
- * - * repeated .google.protobuf.Option options = 3; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.OptionOrBuilder getOptionsOrBuilder( - int index); - - /** - *
-   * A version string for this api. If specified, must have the form
-   * `major-version.minor-version`, as in `1.10`. If the minor version
-   * is omitted, it defaults to zero. If the entire version field is
-   * empty, the major version is derived from the package name, as
-   * outlined below. If the field is not empty, the version in the
-   * package name will be verified to be consistent with what is
-   * provided here.
-   * The versioning schema uses [semantic
-   * versioning](http://semver.org) where the major version number
-   * indicates a breaking change and the minor version an additive,
-   * non-breaking change. Both version numbers are signals to users
-   * what to expect from different versions, and should be carefully
-   * chosen based on the product plan.
-   * The major version is also reflected in the package name of the
-   * API, which must end in `v<major-version>`, as in
-   * `google.feature.v1`. For major versions 0 and 1, the suffix can
-   * be omitted. Zero major versions must only be used for
-   * experimental, non-GA apis.
-   * 
- * - * string version = 4; - */ - java.lang.String getVersion(); - /** - *
-   * A version string for this api. If specified, must have the form
-   * `major-version.minor-version`, as in `1.10`. If the minor version
-   * is omitted, it defaults to zero. If the entire version field is
-   * empty, the major version is derived from the package name, as
-   * outlined below. If the field is not empty, the version in the
-   * package name will be verified to be consistent with what is
-   * provided here.
-   * The versioning schema uses [semantic
-   * versioning](http://semver.org) where the major version number
-   * indicates a breaking change and the minor version an additive,
-   * non-breaking change. Both version numbers are signals to users
-   * what to expect from different versions, and should be carefully
-   * chosen based on the product plan.
-   * The major version is also reflected in the package name of the
-   * API, which must end in `v<major-version>`, as in
-   * `google.feature.v1`. For major versions 0 and 1, the suffix can
-   * be omitted. Zero major versions must only be used for
-   * experimental, non-GA apis.
-   * 
- * - * string version = 4; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getVersionBytes(); - - /** - *
-   * Source context for the protocol buffer service represented by this
-   * message.
-   * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - boolean hasSourceContext(); - /** - *
-   * Source context for the protocol buffer service represented by this
-   * message.
-   * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext getSourceContext(); - /** - *
-   * Source context for the protocol buffer service represented by this
-   * message.
-   * 
- * - * .google.protobuf.SourceContext source_context = 5; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextOrBuilder getSourceContextOrBuilder(); - - /** - *
-   * Included APIs. See [Mixin][].
-   * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - java.util.List - getMixinsList(); - /** - *
-   * Included APIs. See [Mixin][].
-   * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin getMixins(int index); - /** - *
-   * Included APIs. See [Mixin][].
-   * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - int getMixinsCount(); - /** - *
-   * Included APIs. See [Mixin][].
-   * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - java.util.List - getMixinsOrBuilderList(); - /** - *
-   * Included APIs. See [Mixin][].
-   * 
- * - * repeated .google.protobuf.Mixin mixins = 6; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.MixinOrBuilder getMixinsOrBuilder( - int index); - - /** - *
-   * The source syntax of the service.
-   * 
- * - * .google.protobuf.Syntax syntax = 7; - */ - int getSyntaxValue(); - /** - *
-   * The source syntax of the service.
-   * 
- * - * .google.protobuf.Syntax syntax = 7; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.Syntax getSyntax(); -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ApiProto.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ApiProto.java deleted file mode 100644 index 817e07d5e9f..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ApiProto.java +++ /dev/null @@ -1,98 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/api.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -public final class ApiProto { - private ApiProto() {} - public static void registerAllExtensions( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) { - } - - public static void registerAllExtensions( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) { - registerAllExtensions( - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry); - } - static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - internal_static_google_protobuf_Api_descriptor; - static final - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_google_protobuf_Api_fieldAccessorTable; - static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - internal_static_google_protobuf_Method_descriptor; - static final - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_google_protobuf_Method_fieldAccessorTable; - static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - internal_static_google_protobuf_Mixin_descriptor; - static final - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_google_protobuf_Mixin_fieldAccessorTable; - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\031google/protobuf/api.proto\022\017google.prot" + - "obuf\032$google/protobuf/source_context.pro" + - "to\032\032google/protobuf/type.proto\"\201\002\n\003Api\022\014" + - "\n\004name\030\001 \001(\t\022(\n\007methods\030\002 \003(\0132\027.google.p" + - "rotobuf.Method\022(\n\007options\030\003 \003(\0132\027.google" + - ".protobuf.Option\022\017\n\007version\030\004 \001(\t\0226\n\016sou" + - "rce_context\030\005 \001(\0132\036.google.protobuf.Sour" + - "ceContext\022&\n\006mixins\030\006 \003(\0132\026.google.proto" + - "buf.Mixin\022\'\n\006syntax\030\007 \001(\0162\027.google.proto" + - "buf.Syntax\"\325\001\n\006Method\022\014\n\004name\030\001 \001(\t\022\030\n\020r", - "equest_type_url\030\002 \001(\t\022\031\n\021request_streami" + - "ng\030\003 \001(\010\022\031\n\021response_type_url\030\004 \001(\t\022\032\n\022r" + - "esponse_streaming\030\005 \001(\010\022(\n\007options\030\006 \003(\013" + - "2\027.google.protobuf.Option\022\'\n\006syntax\030\007 \001(" + - "\0162\027.google.protobuf.Syntax\"#\n\005Mixin\022\014\n\004n" + - "ame\030\001 \001(\t\022\014\n\004root\030\002 \001(\tBu\n\023com.google.pr" + - "otobufB\010ApiProtoP\001Z+google.golang.org/ge" + - 
"nproto/protobuf/api;api\242\002\003GPB\252\002\036Google.P" + - "rotobuf.WellKnownTypesb\006proto3" - }; - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - return null; - } - }; - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextProto.getDescriptor(), - org.apache.hadoop.hbase.shaded.com.google.protobuf.TypeProto.getDescriptor(), - }, assigner); - internal_static_google_protobuf_Api_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_google_protobuf_Api_fieldAccessorTable = new - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_google_protobuf_Api_descriptor, - new java.lang.String[] { "Name", "Methods", "Options", "Version", "SourceContext", "Mixins", "Syntax", }); - internal_static_google_protobuf_Method_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_google_protobuf_Method_fieldAccessorTable = new - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_google_protobuf_Method_descriptor, - new java.lang.String[] { "Name", "RequestTypeUrl", "RequestStreaming", "ResponseTypeUrl", "ResponseStreaming", "Options", "Syntax", }); - internal_static_google_protobuf_Mixin_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_google_protobuf_Mixin_fieldAccessorTable = new - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_google_protobuf_Mixin_descriptor, - new java.lang.String[] { "Name", "Root", }); - org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContextProto.getDescriptor(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.TypeProto.getDescriptor(); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BlockingRpcChannel.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BlockingRpcChannel.java deleted file mode 100644 index bd2f2cd2fa8..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BlockingRpcChannel.java +++ /dev/null @@ -1,51 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. 
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -/** - *

Abstract interface for a blocking RPC channel. {@code BlockingRpcChannel} - * is the blocking equivalent to {@link RpcChannel}. - * - * @author kenton@google.com Kenton Varda - * @author cpovirk@google.com Chris Povirk - */ -public interface BlockingRpcChannel { - /** - * Call the given method of the remote service and blocks until it returns. - * {@code callBlockingMethod()} is the blocking equivalent to - * {@link RpcChannel#callMethod}. - */ - Message callBlockingMethod( - Descriptors.MethodDescriptor method, - RpcController controller, - Message request, - Message responsePrototype) throws ServiceException; -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BlockingService.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BlockingService.java deleted file mode 100644 index f496f2dac48..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BlockingService.java +++ /dev/null @@ -1,64 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -/** - * Blocking equivalent to {@link Service}. - * - * @author kenton@google.com Kenton Varda - * @author cpovirk@google.com Chris Povirk - */ -public interface BlockingService { - /** - * Equivalent to {@link Service#getDescriptorForType}. - */ - Descriptors.ServiceDescriptor getDescriptorForType(); - - /** - * Equivalent to {@link Service#callMethod}, except that - * {@code callBlockingMethod()} returns the result of the RPC or throws a - * {@link ServiceException} if there is a failure, rather than passing the - * information to a callback. - */ - Message callBlockingMethod(Descriptors.MethodDescriptor method, - RpcController controller, - Message request) throws ServiceException; - - /** - * Equivalent to {@link Service#getRequestPrototype}. 
- */ - Message getRequestPrototype(Descriptors.MethodDescriptor method); - - /** - * Equivalent to {@link Service#getResponsePrototype}. - */ - Message getResponsePrototype(Descriptors.MethodDescriptor method); -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BoolValue.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BoolValue.java deleted file mode 100644 index a2dbe4a9467..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BoolValue.java +++ /dev/null @@ -1,452 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/wrappers.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -/** - *

- * Wrapper message for `bool`.
- * The JSON representation for `BoolValue` is JSON `true` and `false`.
- * 
- * - * Protobuf type {@code google.protobuf.BoolValue} - */ -public final class BoolValue extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:google.protobuf.BoolValue) - BoolValueOrBuilder { - // Use BoolValue.newBuilder() to construct. - private BoolValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private BoolValue() { - value_ = false; - } - - @java.lang.Override - public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private BoolValue( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 8: { - - value_ = input.readBool(); - break; - } - } - } - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - makeExtensionsImmutable(); - } - } - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.Builder.class); - } - - public static final int VALUE_FIELD_NUMBER = 1; - private boolean value_; - /** - *
-   * The bool value.
-   * 
- * - * bool value = 1; - */ - public boolean getValue() { - return value_; - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (value_ != false) { - output.writeBool(1, value_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (value_ != false) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeBoolSize(1, value_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue) obj; - - boolean result = true; - result = result && (getValue() - == other.getValue()); - return result; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashBoolean( - getValue()); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom(byte[] data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( - byte[] data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseDelimitedFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - *
-   * Wrapper message for `bool`.
-   * The JSON representation for `BoolValue` is JSON `true` and `false`.
-   * 
- * - * Protobuf type {@code google.protobuf.BoolValue} - */ - public static final class Builder extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:google.protobuf.BoolValue) - org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValueOrBuilder { - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - value_ = false; - - return this; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BoolValue_descriptor; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue getDefaultInstanceForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue build() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue buildPartial() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue(this); - result.value_ = value_; - onBuilt(); - return result; - } - - public Builder clone() { - return (Builder) super.clone(); - } - public Builder setField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.setField(field, value); - } - public Builder clearField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { - return (Builder) super.clearField(field); - } - public Builder clearOneof( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return (Builder) super.clearOneof(oneof); - } - public Builder setRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { - return (Builder) super.setRepeatedField(field, index, value); - } - public Builder addRepeatedField( - 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.addRepeatedField(field, value); - } - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue) { - return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue other) { - if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue.getDefaultInstance()) return this; - if (other.getValue() != false) { - setValue(other.getValue()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private boolean value_ ; - /** - *
-     * The bool value.
-     * 
- * - * bool value = 1; - */ - public boolean getValue() { - return value_; - } - /** - *
-     * The bool value.
-     * 
- * - * bool value = 1; - */ - public Builder setValue(boolean value) { - - value_ = value; - onChanged(); - return this; - } - /** - *
-     * The bool value.
-     * 
- * - * bool value = 1; - */ - public Builder clearValue() { - - value_ = false; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.protobuf.BoolValue) - } - - // @@protoc_insertion_point(class_scope:google.protobuf.BoolValue) - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue(); - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser - PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { - public BoolValue parsePartialFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return new BoolValue(input, extensionRegistry); - } - }; - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BoolValueOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BoolValueOrBuilder.java deleted file mode 100644 index c1d3b5f0ddc..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BoolValueOrBuilder.java +++ /dev/null @@ -1,18 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/wrappers.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -public interface BoolValueOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.protobuf.BoolValue) - org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { - - /** - *
-   * The bool value.
-   * 
- * - * bool value = 1; - */ - boolean getValue(); -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BooleanArrayList.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BooleanArrayList.java deleted file mode 100644 index 6bc9641747d..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BooleanArrayList.java +++ /dev/null @@ -1,272 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.BooleanList; - -import java.util.Arrays; -import java.util.Collection; -import java.util.RandomAccess; - -/** - * An implementation of {@link BooleanList} on top of a primitive array. - * - * @author dweis@google.com (Daniel Weis) - */ -final class BooleanArrayList - extends AbstractProtobufList - implements BooleanList, RandomAccess { - - private static final BooleanArrayList EMPTY_LIST = new BooleanArrayList(); - static { - EMPTY_LIST.makeImmutable(); - } - - public static BooleanArrayList emptyList() { - return EMPTY_LIST; - } - - /** - * The backing store for the list. - */ - private boolean[] array; - - /** - * The size of the list distinct from the length of the array. That is, it is the number of - * elements set in the list. - */ - private int size; - - /** - * Constructs a new mutable {@code BooleanArrayList} with default capacity. - */ - BooleanArrayList() { - this(new boolean[DEFAULT_CAPACITY], 0); - } - - /** - * Constructs a new mutable {@code BooleanArrayList} - * containing the same elements as {@code other}. 
- */ - private BooleanArrayList(boolean[] other, int size) { - array = other; - this.size = size; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (!(o instanceof BooleanArrayList)) { - return super.equals(o); - } - BooleanArrayList other = (BooleanArrayList) o; - if (size != other.size) { - return false; - } - - final boolean[] arr = other.array; - for (int i = 0; i < size; i++) { - if (array[i] != arr[i]) { - return false; - } - } - - return true; - } - - @Override - public int hashCode() { - int result = 1; - for (int i = 0; i < size; i++) { - result = (31 * result) + Internal.hashBoolean(array[i]); - } - return result; - } - - @Override - public BooleanList mutableCopyWithCapacity(int capacity) { - if (capacity < size) { - throw new IllegalArgumentException(); - } - return new BooleanArrayList(Arrays.copyOf(array, capacity), size); - } - - @Override - public Boolean get(int index) { - return getBoolean(index); - } - - @Override - public boolean getBoolean(int index) { - ensureIndexInRange(index); - return array[index]; - } - - @Override - public int size() { - return size; - } - - @Override - public Boolean set(int index, Boolean element) { - return setBoolean(index, element); - } - - @Override - public boolean setBoolean(int index, boolean element) { - ensureIsMutable(); - ensureIndexInRange(index); - boolean previousValue = array[index]; - array[index] = element; - return previousValue; - } - - @Override - public void add(int index, Boolean element) { - addBoolean(index, element); - } - - /** - * Like {@link #add(Boolean)} but more efficient in that it doesn't box the element. - */ - @Override - public void addBoolean(boolean element) { - addBoolean(size, element); - } - - /** - * Like {@link #add(int, Boolean)} but more efficient in that it doesn't box the element. - */ - private void addBoolean(int index, boolean element) { - ensureIsMutable(); - if (index < 0 || index > size) { - throw new IndexOutOfBoundsException(makeOutOfBoundsExceptionMessage(index)); - } - - if (size < array.length) { - // Shift everything over to make room - System.arraycopy(array, index, array, index + 1, size - index); - } else { - // Resize to 1.5x the size - int length = ((size * 3) / 2) + 1; - boolean[] newArray = new boolean[length]; - - // Copy the first part directly - System.arraycopy(array, 0, newArray, 0, index); - - // Copy the rest shifted over by one to make room - System.arraycopy(array, index, newArray, index + 1, size - index); - array = newArray; - } - - array[index] = element; - size++; - modCount++; - } - - @Override - public boolean addAll(Collection collection) { - ensureIsMutable(); - - if (collection == null) { - throw new NullPointerException(); - } - - // We specialize when adding another BooleanArrayList to avoid boxing elements. - if (!(collection instanceof BooleanArrayList)) { - return super.addAll(collection); - } - - BooleanArrayList list = (BooleanArrayList) collection; - if (list.size == 0) { - return false; - } - - int overflow = Integer.MAX_VALUE - size; - if (overflow < list.size) { - // We can't actually represent a list this large. 
- throw new OutOfMemoryError(); - } - - int newSize = size + list.size; - if (newSize > array.length) { - array = Arrays.copyOf(array, newSize); - } - - System.arraycopy(list.array, 0, array, size, list.size); - size = newSize; - modCount++; - return true; - } - - @Override - public boolean remove(Object o) { - ensureIsMutable(); - for (int i = 0; i < size; i++) { - if (o.equals(array[i])) { - System.arraycopy(array, i + 1, array, i, size - i); - size--; - modCount++; - return true; - } - } - return false; - } - - @Override - public Boolean remove(int index) { - ensureIsMutable(); - ensureIndexInRange(index); - boolean value = array[index]; - System.arraycopy(array, index + 1, array, index, size - index); - size--; - modCount++; - return value; - } - - /** - * Ensures that the provided {@code index} is within the range of {@code [0, size]}. Throws an - * {@link IndexOutOfBoundsException} if it is not. - * - * @param index the index to verify is in range - */ - private void ensureIndexInRange(int index) { - if (index < 0 || index >= size) { - throw new IndexOutOfBoundsException(makeOutOfBoundsExceptionMessage(index)); - } - } - - private String makeOutOfBoundsExceptionMessage(int index) { - return "Index:" + index + ", Size:" + size; - } -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteBufferWriter.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteBufferWriter.java deleted file mode 100644 index 553243a07a4..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteBufferWriter.java +++ /dev/null @@ -1,185 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import static java.lang.Math.max; -import static java.lang.Math.min; - -import java.io.IOException; -import java.io.OutputStream; -import java.lang.ref.SoftReference; -import java.lang.reflect.Field; -import java.nio.ByteBuffer; -import java.nio.channels.WritableByteChannel; - -/** - * Utility class to provide efficient writing of {@link ByteBuffer}s to {@link OutputStream}s. - */ -final class ByteBufferWriter { - private ByteBufferWriter() {} - - /** - * Minimum size for a cached buffer. This prevents us from allocating buffers that are too - * small to be easily reused. - */ - // TODO(nathanmittler): tune this property or allow configuration? - private static final int MIN_CACHED_BUFFER_SIZE = 1024; - - /** - * Maximum size for a cached buffer. If a larger buffer is required, it will be allocated - * but not cached. - */ - // TODO(nathanmittler): tune this property or allow configuration? - private static final int MAX_CACHED_BUFFER_SIZE = 16 * 1024; - - /** - * The fraction of the requested buffer size under which the buffer will be reallocated. - */ - // TODO(nathanmittler): tune this property or allow configuration? - private static final float BUFFER_REALLOCATION_THRESHOLD = 0.5f; - - /** - * Keeping a soft reference to a thread-local buffer. This buffer is used for writing a - * {@link ByteBuffer} to an {@link OutputStream} when no zero-copy alternative was available. - * Using a "soft" reference since VMs may keep this reference around longer than "weak" - * (e.g. HotSpot will maintain soft references until memory pressure warrants collection). - */ - private static final ThreadLocal> BUFFER = - new ThreadLocal>(); - - /** - * This is a hack for GAE, where {@code FileOutputStream} is unavailable. - */ - private static final Class FILE_OUTPUT_STREAM_CLASS = safeGetClass("java.io.FileOutputStream"); - private static final long CHANNEL_FIELD_OFFSET = getChannelFieldOffset(FILE_OUTPUT_STREAM_CLASS); - - /** - * For testing purposes only. Clears the cached buffer to force a new allocation on the next - * invocation. - */ - static void clearCachedBuffer() { - BUFFER.set(null); - } - - /** - * Writes the remaining content of the buffer to the given stream. The buffer {@code position} - * will remain unchanged by this method. - */ - static void write(ByteBuffer buffer, OutputStream output) throws IOException { - final int initialPos = buffer.position(); - try { - if (buffer.hasArray()) { - // Optimized write for array-backed buffers. - // Note that we're taking the risk that a malicious OutputStream could modify the array. - output.write(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); - } else if (!writeToChannel(buffer, output)){ - // Read all of the data from the buffer to an array. - // TODO(nathanmittler): Consider performance improvements for other "known" stream types. - final byte[] array = getOrCreateBuffer(buffer.remaining()); - while (buffer.hasRemaining()) { - int length = min(buffer.remaining(), array.length); - buffer.get(array, 0, length); - output.write(array, 0, length); - } - } - } finally { - // Restore the initial position. - buffer.position(initialPos); - } - } - - static byte[] getOrCreateBuffer(int requestedSize) { - requestedSize = max(requestedSize, MIN_CACHED_BUFFER_SIZE); - - byte[] buffer = getBuffer(); - // Only allocate if we need to. 
- if (buffer == null || needToReallocate(requestedSize, buffer.length)) { - buffer = new byte[requestedSize]; - - // Only cache the buffer if it's not too big. - if (requestedSize <= MAX_CACHED_BUFFER_SIZE) { - setBuffer(buffer); - } - } - return buffer; - } - - private static boolean needToReallocate(int requestedSize, int bufferLength) { - // First check against just the requested length to avoid the multiply. - return bufferLength < requestedSize - && bufferLength < requestedSize * BUFFER_REALLOCATION_THRESHOLD; - } - - private static byte[] getBuffer() { - SoftReference sr = BUFFER.get(); - return sr == null ? null : sr.get(); - } - - private static void setBuffer(byte[] value) { - BUFFER.set(new SoftReference(value)); - } - - private static boolean writeToChannel(ByteBuffer buffer, OutputStream output) throws IOException { - if (CHANNEL_FIELD_OFFSET >= 0 && FILE_OUTPUT_STREAM_CLASS.isInstance(output)) { - // Use a channel to write out the ByteBuffer. This will automatically empty the buffer. - WritableByteChannel channel = null; - try { - channel = (WritableByteChannel) UnsafeUtil.getObject(output, CHANNEL_FIELD_OFFSET); - } catch (ClassCastException e) { - // Absorb. - } - if (channel != null) { - channel.write(buffer); - return true; - } - } - return false; - } - - private static Class safeGetClass(String className) { - try { - return Class.forName(className); - } catch (ClassNotFoundException e) { - return null; - } - } - private static long getChannelFieldOffset(Class clazz) { - try { - if (clazz != null && UnsafeUtil.hasUnsafeArrayOperations()) { - Field field = clazz.getDeclaredField("channel"); - return UnsafeUtil.objectFieldOffset(field); - } - } catch (Throwable e) { - // Absorb - } - return -1; - } -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInput.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInput.java deleted file mode 100644 index a745d37991f..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInput.java +++ /dev/null @@ -1,81 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import java.io.IOException; -import java.nio.ByteBuffer; - -/** - * An input for raw bytes. This is similar to an InputStream but it is offset addressable. All the - * read APIs are relative. - */ -@ExperimentalApi -public abstract class ByteInput { - - /** - * Reads a single byte from the given offset. - * @param offset The offset from where byte to be read - * @return The byte of data at given offset - */ - public abstract byte read(int offset); - - /** - * Reads bytes of data from the given offset into an array of bytes. - * @param offset The src offset within this ByteInput from where data to be read. - * @param out Destination byte array to read data into. - * @return The number of bytes read from ByteInput - */ - public int read(int offset, byte b[]) throws IOException { - return read(offset, b, 0, b.length); - } - - /** - * Reads up to len bytes of data from the given offset into an array of bytes. - * @param offset The src offset within this ByteInput from where data to be read. - * @param out Destination byte array to read data into. - * @param outOffset Offset within the the out byte[] where data to be read into. - * @param len The number of bytes to read. - * @return The number of bytes read from ByteInput - */ - public abstract int read(int offset, byte[] out, int outOffset, int len); - - /** - * Reads bytes of data from the given offset into given {@link ByteBuffer}. - * @param offset he src offset within this ByteInput from where data to be read. - * @param out Destination {@link ByteBuffer} to read data into. - * @return The number of bytes read from ByteInput - */ - public abstract int read(int offset, ByteBuffer out); - - /** - * @return Total number of bytes in this ByteInput. - */ - public abstract int size(); -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInputByteString.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInputByteString.java deleted file mode 100644 index 30de4ec33ac..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteInputByteString.java +++ /dev/null @@ -1,251 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import java.io.IOException; -import java.io.InputStream; -import java.io.InvalidObjectException; -import java.io.ObjectInputStream; -import java.io.OutputStream; -import java.nio.ByteBuffer; -import java.nio.charset.Charset; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -/** - * A {@link ByteString} that wraps around a {@link ByteInput}. - */ -final class ByteInputByteString extends ByteString.LeafByteString { - private final ByteInput buffer; - private final int offset, length; - - ByteInputByteString(ByteInput buffer, int offset, int length) { - if (buffer == null) { - throw new NullPointerException("buffer"); - } - this.buffer = buffer; - this.offset = offset; - this.length = length; - } - - // ================================================================= - // Serializable - - /** - * Magic method that lets us override serialization behavior. - */ - private Object writeReplace() { - return ByteString.wrap(toByteArray()); - } - - /** - * Magic method that lets us override deserialization behavior. 
- */ - private void readObject(@SuppressWarnings("unused") ObjectInputStream in) throws IOException { - throw new InvalidObjectException("ByteInputByteString instances are not to be serialized directly");// TODO check here - } - - // ================================================================= - - @Override - public byte byteAt(int index) { - return buffer.read(getAbsoluteOffset(index)); - } - - private int getAbsoluteOffset(int relativeOffset) { - return this.offset + relativeOffset; - } - - @Override - public int size() { - return length; - } - - @Override - public ByteString substring(int beginIndex, int endIndex) { - if (beginIndex < 0 || beginIndex >= size() || endIndex < beginIndex || endIndex >= size()) { - throw new IllegalArgumentException( - String.format("Invalid indices [%d, %d]", beginIndex, endIndex)); - } - return new ByteInputByteString(this.buffer, getAbsoluteOffset(beginIndex), endIndex - beginIndex); - } - - @Override - protected void copyToInternal( - byte[] target, int sourceOffset, int targetOffset, int numberToCopy) { - this.buffer.read(getAbsoluteOffset(sourceOffset), target, targetOffset, numberToCopy); - } - - @Override - public void copyTo(ByteBuffer target) { - this.buffer.read(this.offset, target); - } - - @Override - public void writeTo(OutputStream out) throws IOException { - out.write(toByteArray());// TODO - } - - @Override - boolean equalsRange(ByteString other, int offset, int length) { - return substring(0, length).equals(other.substring(offset, offset + length)); - } - - @Override - void writeToInternal(OutputStream out, int sourceOffset, int numberToWrite) throws IOException { - byte[] buf = ByteBufferWriter.getOrCreateBuffer(numberToWrite); - this.buffer.read(getAbsoluteOffset(sourceOffset), buf, 0, numberToWrite); - out.write(buf, 0, numberToWrite); - } - - @Override - void writeTo(ByteOutput output) throws IOException { - output.writeLazy(toByteArray(), 0, length); - } - - @Override - public ByteBuffer asReadOnlyByteBuffer() { - return ByteBuffer.wrap(toByteArray()).asReadOnlyBuffer(); - } - - @Override - public List asReadOnlyByteBufferList() { - return Collections.singletonList(asReadOnlyByteBuffer()); - } - - @Override - protected String toStringInternal(Charset charset) { - byte[] bytes = toByteArray(); - return new String(bytes, 0, bytes.length, charset); - } - - @Override - public boolean isValidUtf8() { - return Utf8.isValidUtf8(buffer, offset, offset + length); - } - - @Override - protected int partialIsValidUtf8(int state, int offset, int length) { - int off = getAbsoluteOffset(offset); - return Utf8.partialIsValidUtf8(state, buffer, off, off + length); - } - - @Override - public boolean equals(Object other) { - if (other == this) { - return true; - } - if (!(other instanceof ByteString)) { - return false; - } - ByteString otherString = ((ByteString) other); - if (size() != otherString.size()) { - return false; - } - if (size() == 0) { - return true; - } - if (other instanceof RopeByteString) { - return other.equals(this); - } - return Arrays.equals(this.toByteArray(), otherString.toByteArray()); - } - - @Override - protected int partialHash(int h, int offset, int length) { - offset = getAbsoluteOffset(offset); - int end = offset + length; - for (int i = offset; i < end; i++) { - h = h * 31 + buffer.read(i); - } - return h; - } - - @Override - public InputStream newInput() { - return new InputStream() { - private final ByteInput buf = buffer; - private int pos = offset; - private int limit = pos + length; - private int mark = pos; - - 
@Override - public void mark(int readlimit) { - this.mark = readlimit; - } - - @Override - public boolean markSupported() { - return true; - } - - @Override - public void reset() throws IOException { - this.pos = this.mark; - } - - @Override - public int available() throws IOException { - return this.limit - this.pos; - } - - @Override - public int read() throws IOException { - if (available() <= 0) { - return -1; - } - return this.buf.read(pos++) & 0xFF; - } - - @Override - public int read(byte[] bytes, int off, int len) throws IOException { - int remain = available(); - if (remain <= 0) { - return -1; - } - len = Math.min(len, remain); - buf.read(pos, bytes, off, len); - pos += len; - return len; - } - }; - } - - @Override - public CodedInputStream newCodedInput() { - // We trust CodedInputStream not to modify the bytes, or to give anyone - // else access to them. - CodedInputStream cis = CodedInputStream.newInstance(buffer, offset, length, true); - cis.enableAliasing(true); - return cis; - } -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteOutput.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteOutput.java deleted file mode 100644 index 8a83fb269b8..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteOutput.java +++ /dev/null @@ -1,116 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import java.io.IOException; -import java.nio.ByteBuffer; - -/** - * An output target for raw bytes. This interface provides semantics that support two types of - * writing: - * - *
- * <p><b>Traditional write operations:</b> - * (as defined by {@link java.io.OutputStream}) where the target method is responsible for either - * copying the data or completing the write before returning from the method call. - * - *
Lazy write operations: where the caller guarantees that it will never modify the - * provided buffer and it can therefore be considered immutable. The target method is free to - * maintain a reference to the buffer beyond the scope of the method call (e.g. until the write - * operation completes). - */ -@ExperimentalApi -public abstract class ByteOutput { - /** - * Writes a single byte. - * - * @param value the byte to be written - * @throws IOException thrown if an error occurred while writing - */ - public abstract void write(byte value) throws IOException; - - /** - * Writes a sequence of bytes. The {@link ByteOutput} must copy {@code value} if it will - * not be processed prior to the return of this method call, since {@code value} may be - * reused/altered by the caller. - * - *
NOTE: This method MUST NOT modify the {@code value}. Doing so is a - * programming error and will lead to data corruption which will be difficult to debug. - * - * @param value the bytes to be written - * @param offset the offset of the start of the writable range - * @param length the number of bytes to write starting from {@code offset} - * @throws IOException thrown if an error occurred while writing - */ - public abstract void write(byte[] value, int offset, int length) throws IOException; - - /** - * Writes a sequence of bytes. The {@link ByteOutput} is free to retain a reference to the value - * beyond the scope of this method call (e.g. write later) since it is considered immutable and is - * guaranteed not to change by the caller. - * - *
NOTE: This method MUST NOT modify the {@code value}. Doing so is a - * programming error and will lead to data corruption which will be difficult to debug. - * - * @param value the bytes to be written - * @param offset the offset of the start of the writable range - * @param length the number of bytes to write starting from {@code offset} - * @throws IOException thrown if an error occurred while writing - */ - public abstract void writeLazy(byte[] value, int offset, int length) throws IOException; - - /** - * Writes a sequence of bytes. The {@link ByteOutput} must copy {@code value} if it will - * not be processed prior to the return of this method call, since {@code value} may be - * reused/altered by the caller. - * - *
NOTE: This method MUST NOT modify the {@code value}. Doing so is a - * programming error and will lead to data corruption which will be difficult to debug. - * - * @param value the bytes to be written. Upon returning from this call, the {@code position} of - * this buffer will be set to the {@code limit} - * @throws IOException thrown if an error occurred while writing - */ - public abstract void write(ByteBuffer value) throws IOException; - - /** - * Writes a sequence of bytes. The {@link ByteOutput} is free to retain a reference to the value - * beyond the scope of this method call (e.g. write later) since it is considered immutable and is - * guaranteed not to change by the caller. - * - *
NOTE: This method MUST NOT modify the {@code value}. Doing so is a - * programming error and will lead to data corruption which will be difficult to debug. - * - * @param value the bytes to be written. Upon returning from this call, the {@code position} of - * this buffer will be set to the {@code limit} - * @throws IOException thrown if an error occurred while writing - */ - public abstract void writeLazy(ByteBuffer value) throws IOException; -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteString.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteString.java deleted file mode 100644 index f673ee3aecd..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ByteString.java +++ /dev/null @@ -1,1565 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InvalidObjectException; -import java.io.ObjectInputStream; -import java.io.OutputStream; -import java.io.Serializable; -import java.io.UnsupportedEncodingException; -import java.nio.ByteBuffer; -import java.nio.charset.Charset; -import java.nio.charset.UnsupportedCharsetException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.NoSuchElementException; - -/** - * Immutable sequence of bytes. Substring is supported by sharing the reference to the immutable - * underlying bytes. Concatenation is likewise supported without copying (long strings) by building - * a tree of pieces in {@link RopeByteString}. - * - *
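// A minimal, illustrative sketch of the substring/concat semantics described in the class
// comment above; it assumes the shaded ByteString API exactly as declared in this deleted
// file (the example class name is made up).
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

class ByteStringSliceExample {
  public static void main(String[] args) {
    ByteString s = ByteString.copyFromUtf8("hello, world");
    ByteString hello = s.substring(0, 5);      // shares the underlying bytes, no copy
    ByteString joined = s.concat(hello);       // small results are copied; large ones become a rope
    System.out.println(joined.toStringUtf8()); // hello, worldhello
  }
}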
Like {@link String}, the contents of a {@link ByteString} can never be observed to change, not - * even in the presence of a data race or incorrect API usage in the client code. - * - * @author crazybob@google.com Bob Lee - * @author kenton@google.com Kenton Varda - * @author carlanton@google.com Carl Haverl - * @author martinrb@google.com Martin Buchholz - */ -public abstract class ByteString implements Iterable, Serializable { - - /** - * When two strings to be concatenated have a combined length shorter than - * this, we just copy their bytes on {@link #concat(ByteString)}. - * The trade-off is copy size versus the overhead of creating tree nodes - * in {@link RopeByteString}. - */ - static final int CONCATENATE_BY_COPY_SIZE = 128; - - /** - * When copying an InputStream into a ByteString with .readFrom(), - * the chunks in the underlying rope start at 256 bytes, but double - * each iteration up to 8192 bytes. - */ - static final int MIN_READ_FROM_CHUNK_SIZE = 0x100; // 256b - static final int MAX_READ_FROM_CHUNK_SIZE = 0x2000; // 8k - - /** - * Empty {@code ByteString}. - */ - public static final ByteString EMPTY = new LiteralByteString(Internal.EMPTY_BYTE_ARRAY); - - /** - * An interface to efficiently copy {@code byte[]}. - * - *
One of the noticeable costs of copying a byte[] into a new array using - * {@code System.arraycopy} is nullification of a new buffer before the copy. It has been shown - * the Hotspot VM is capable to intrisicfy {@code Arrays.copyOfRange} operation to avoid this - * expensive nullification and provide substantial performance gain. Unfortunately this does not - * hold on Android runtimes and could make the copy slightly slower due to additional code in - * the {@code Arrays.copyOfRange}. Thus we provide two different implementation for array copier - * for Hotspot and Android runtimes. - */ - private interface ByteArrayCopier { - /** - * Copies the specified range of the specified array into a new array - */ - byte[] copyFrom(byte[] bytes, int offset, int size); - } - - /** Implementation of {@code ByteArrayCopier} which uses {@link System#arraycopy}. */ - private static final class SystemByteArrayCopier implements ByteArrayCopier { - @Override - public byte[] copyFrom(byte[] bytes, int offset, int size) { - byte[] copy = new byte[size]; - System.arraycopy(bytes, offset, copy, 0, size); - return copy; - } - } - - /** Implementation of {@code ByteArrayCopier} which uses {@link Arrays#copyOfRange}. */ - private static final class ArraysByteArrayCopier implements ByteArrayCopier { - @Override - public byte[] copyFrom(byte[] bytes, int offset, int size) { - return Arrays.copyOfRange(bytes, offset, offset + size); - } - } - - private static final ByteArrayCopier byteArrayCopier; - static { - boolean isAndroid = true; - try { - Class.forName("android.content.Context"); - } catch (ClassNotFoundException e) { - isAndroid = false; - } - - byteArrayCopier = isAndroid ? new SystemByteArrayCopier() : new ArraysByteArrayCopier(); - } - - /** - * Cached hash value. Intentionally accessed via a data race, which - * is safe because of the Java Memory Model's "no out-of-thin-air values" - * guarantees for ints. A value of 0 implies that the hash has not been set. - */ - private int hash = 0; - - // This constructor is here to prevent subclassing outside of this package, - ByteString() {} - - /** - * Gets the byte at the given index. This method should be used only for - * random access to individual bytes. To access bytes sequentially, use the - * {@link ByteIterator} returned by {@link #iterator()}, and call {@link - * #substring(int, int)} first if necessary. - * - * @param index index of byte - * @return the value - * @throws IndexOutOfBoundsException {@code index < 0 or index >= size} - */ - public abstract byte byteAt(int index); - - /** - * Return a {@link ByteString.ByteIterator} over the bytes in the ByteString. - * To avoid auto-boxing, you may get the iterator manually and call - * {@link ByteIterator#nextByte()}. - * - * @return the iterator - */ - @Override - public final ByteIterator iterator() { - return new ByteIterator() { - private int position = 0; - private final int limit = size(); - - @Override - public boolean hasNext() { - return position < limit; - } - - @Override - public Byte next() { - // Boxing calls Byte.valueOf(byte), which does not instantiate. - return nextByte(); - } - - @Override - public byte nextByte() { - try { - return byteAt(position++); - } catch (IndexOutOfBoundsException e) { - throw new NoSuchElementException(e.getMessage()); - } - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - }; - } - - /** - * This interface extends {@code Iterator}, so that we can return an - * unboxed {@code byte}. 
- */ - public interface ByteIterator extends Iterator { - /** - * An alternative to {@link Iterator#next()} that returns an - * unboxed primitive {@code byte}. - * - * @return the next {@code byte} in the iteration - * @throws NoSuchElementException if the iteration has no more elements - */ - byte nextByte(); - } - - /** - * Gets the number of bytes. - * - * @return size in bytes - */ - public abstract int size(); - - /** - * Returns {@code true} if the size is {@code 0}, {@code false} otherwise. - * - * @return true if this is zero bytes long - */ - public final boolean isEmpty() { - return size() == 0; - } - - // ================================================================= - // ByteString -> substring - - /** - * Return the substring from {@code beginIndex}, inclusive, to the end of the - * string. - * - * @param beginIndex start at this index - * @return substring sharing underlying data - * @throws IndexOutOfBoundsException if {@code beginIndex < 0} or - * {@code beginIndex > size()}. - */ - public final ByteString substring(int beginIndex) { - return substring(beginIndex, size()); - } - - /** - * Return the substring from {@code beginIndex}, inclusive, to {@code - * endIndex}, exclusive. - * - * @param beginIndex start at this index - * @param endIndex the last character is the one before this index - * @return substring sharing underlying data - * @throws IndexOutOfBoundsException if {@code beginIndex < 0}, - * {@code endIndex > size()}, or {@code beginIndex > endIndex}. - */ - public abstract ByteString substring(int beginIndex, int endIndex); - - /** - * Tests if this bytestring starts with the specified prefix. - * Similar to {@link String#startsWith(String)} - * - * @param prefix the prefix. - * @return true if the byte sequence represented by the - * argument is a prefix of the byte sequence represented by - * this string; false otherwise. - */ - public final boolean startsWith(ByteString prefix) { - return size() >= prefix.size() && - substring(0, prefix.size()).equals(prefix); - } - - /** - * Tests if this bytestring ends with the specified suffix. - * Similar to {@link String#endsWith(String)} - * - * @param suffix the suffix. - * @return true if the byte sequence represented by the - * argument is a suffix of the byte sequence represented by - * this string; false otherwise. - */ - public final boolean endsWith(ByteString suffix) { - return size() >= suffix.size() && - substring(size() - suffix.size()).equals(suffix); - } - - // ================================================================= - // byte[] -> ByteString - - /** - * Copies the given bytes into a {@code ByteString}. - * - * @param bytes source array - * @param offset offset in source array - * @param size number of bytes to copy - * @return new {@code ByteString} - */ - public static ByteString copyFrom(byte[] bytes, int offset, int size) { - return new LiteralByteString(byteArrayCopier.copyFrom(bytes, offset, size)); - } - - /** - * Copies the given bytes into a {@code ByteString}. - * - * @param bytes to copy - * @return new {@code ByteString} - */ - public static ByteString copyFrom(byte[] bytes) { - return copyFrom(bytes, 0, bytes.length); - } - - /** - * Wraps the given bytes into a {@code ByteString}. Intended for internal only usage. 
- */ - static ByteString wrap(ByteBuffer buffer) { - if (buffer.hasArray()) { - final int offset = buffer.arrayOffset(); - return ByteString.wrap(buffer.array(), offset + buffer.position(), buffer.remaining()); - } else { - return new NioByteString(buffer); - } - } - - /** - * Wraps the given bytes into a {@code ByteString}. Intended for internal only usage. - */ - static ByteString wrap(ByteInput buffer, int offset, int length) { - return new ByteInputByteString(buffer, offset, length); - } - - /** - * Wraps the given bytes into a {@code ByteString}. Intended for internal only - * usage to force a classload of ByteString before LiteralByteString. - */ - static ByteString wrap(byte[] bytes) { - // TODO(dweis): Return EMPTY when bytes are empty to reduce allocations? - return new LiteralByteString(bytes); - } - - /** - * Wraps the given bytes into a {@code ByteString}. Intended for internal only - * usage to force a classload of ByteString before BoundedByteString and - * LiteralByteString. - */ - static ByteString wrap(byte[] bytes, int offset, int length) { - return new BoundedByteString(bytes, offset, length); - } - - /** - * Copies the next {@code size} bytes from a {@code java.nio.ByteBuffer} into - * a {@code ByteString}. - * - * @param bytes source buffer - * @param size number of bytes to copy - * @return new {@code ByteString} - */ - public static ByteString copyFrom(ByteBuffer bytes, int size) { - byte[] copy = new byte[size]; - bytes.get(copy); - return new LiteralByteString(copy); - } - - /** - * Copies the remaining bytes from a {@code java.nio.ByteBuffer} into - * a {@code ByteString}. - * - * @param bytes sourceBuffer - * @return new {@code ByteString} - */ - public static ByteString copyFrom(ByteBuffer bytes) { - return copyFrom(bytes, bytes.remaining()); - } - - /** - * Encodes {@code text} into a sequence of bytes using the named charset - * and returns the result as a {@code ByteString}. - * - * @param text source string - * @param charsetName encoding to use - * @return new {@code ByteString} - * @throws UnsupportedEncodingException if the encoding isn't found - */ - public static ByteString copyFrom(String text, String charsetName) - throws UnsupportedEncodingException { - return new LiteralByteString(text.getBytes(charsetName)); - } - - /** - * Encodes {@code text} into a sequence of bytes using the named charset - * and returns the result as a {@code ByteString}. - * - * @param text source string - * @param charset encode using this charset - * @return new {@code ByteString} - */ - public static ByteString copyFrom(String text, Charset charset) { - return new LiteralByteString(text.getBytes(charset)); - } - - /** - * Encodes {@code text} into a sequence of UTF-8 bytes and returns the - * result as a {@code ByteString}. - * - * @param text source string - * @return new {@code ByteString} - */ - public static ByteString copyFromUtf8(String text) { - return new LiteralByteString(text.getBytes(Internal.UTF_8)); - } - - // ================================================================= - // InputStream -> ByteString - - /** - * Completely reads the given stream's bytes into a - * {@code ByteString}, blocking if necessary until all bytes are - * read through to the end of the stream. - * - * Performance notes: The returned {@code ByteString} is an - * immutable tree of byte arrays ("chunks") of the stream data. The - * first chunk is small, with subsequent chunks each being double - * the size, up to 8K. - * - *
Each byte read from the input stream will be copied twice to ensure - * that the resulting ByteString is truly immutable. - * - * @param streamToDrain The source stream, which is read completely - * but not closed. - * @return A new {@code ByteString} which is made up of chunks of - * various sizes, depending on the behavior of the underlying - * stream. - * @throws IOException IOException is thrown if there is a problem - * reading the underlying stream. - */ - public static ByteString readFrom(InputStream streamToDrain) - throws IOException { - return readFrom(streamToDrain, MIN_READ_FROM_CHUNK_SIZE, MAX_READ_FROM_CHUNK_SIZE); - } - - /** - * Completely reads the given stream's bytes into a - * {@code ByteString}, blocking if necessary until all bytes are - * read through to the end of the stream. - * - * Performance notes: The returned {@code ByteString} is an - * immutable tree of byte arrays ("chunks") of the stream data. The - * chunkSize parameter sets the size of these byte arrays. - * - *
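// An illustrative sketch of the chunked readFrom() behaviour documented above; it assumes
// the shaded ByteString API as declared in this deleted file (the example class name is
// made up).
import java.io.ByteArrayInputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

class ByteStringReadFromExample {
  public static void main(String[] args) throws IOException {
    byte[] data = new byte[10000];  // large enough to span several 256..8192-byte chunks
    ByteString bs = ByteString.readFrom(new ByteArrayInputStream(data));
    System.out.println(bs.size());  // 10000, backed by a tree ("rope") of chunks
  }
}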
Each byte read from the input stream will be copied twice to ensure - * that the resulting ByteString is truly immutable. - * - * @param streamToDrain The source stream, which is read completely - * but not closed. - * @param chunkSize The size of the chunks in which to read the - * stream. - * @return A new {@code ByteString} which is made up of chunks of - * the given size. - * @throws IOException IOException is thrown if there is a problem - * reading the underlying stream. - */ - public static ByteString readFrom(InputStream streamToDrain, int chunkSize) - throws IOException { - return readFrom(streamToDrain, chunkSize, chunkSize); - } - - // Helper method that takes the chunk size range as a parameter. - public static ByteString readFrom(InputStream streamToDrain, int minChunkSize, - int maxChunkSize) throws IOException { - Collection results = new ArrayList(); - - // copy the inbound bytes into a list of chunks; the chunk size - // grows exponentially to support both short and long streams. - int chunkSize = minChunkSize; - while (true) { - ByteString chunk = readChunk(streamToDrain, chunkSize); - if (chunk == null) { - break; - } - results.add(chunk); - chunkSize = Math.min(chunkSize * 2, maxChunkSize); - } - - return ByteString.copyFrom(results); - } - - /** - * Blocks until a chunk of the given size can be made from the - * stream, or EOF is reached. Calls read() repeatedly in case the - * given stream implementation doesn't completely fill the given - * buffer in one read() call. - * - * @return A chunk of the desired size, or else a chunk as large as - * was available when end of stream was reached. Returns null if the - * given stream had no more data in it. - */ - private static ByteString readChunk(InputStream in, final int chunkSize) - throws IOException { - final byte[] buf = new byte[chunkSize]; - int bytesRead = 0; - while (bytesRead < chunkSize) { - final int count = in.read(buf, bytesRead, chunkSize - bytesRead); - if (count == -1) { - break; - } - bytesRead += count; - } - - if (bytesRead == 0) { - return null; - } - - // Always make a copy since InputStream could steal a reference to buf. - return ByteString.copyFrom(buf, 0, bytesRead); - } - - // ================================================================= - // Multiple ByteStrings -> One ByteString - - /** - * Concatenate the given {@code ByteString} to this one. Short concatenations, - * of total size smaller than {@link ByteString#CONCATENATE_BY_COPY_SIZE}, are - * produced by copying the underlying bytes (as per Rope.java, - * BAP95 . In general, the concatenate involves no copying. - * - * @param other string to concatenate - * @return a new {@code ByteString} instance - */ - public final ByteString concat(ByteString other) { - if (Integer.MAX_VALUE - size() < other.size()) { - throw new IllegalArgumentException("ByteString would be too long: " + - size() + "+" + other.size()); - } - - return RopeByteString.concatenate(this, other); - } - - /** - * Concatenates all byte strings in the iterable and returns the result. - * This is designed to run in O(list size), not O(total bytes). - * - *
The returned {@code ByteString} is not necessarily a unique object. - * If the list is empty, the returned object is the singleton empty - * {@code ByteString}. If the list has only one element, that - * {@code ByteString} will be returned without copying. - * - * @param byteStrings strings to be concatenated - * @return new {@code ByteString} - */ - public static ByteString copyFrom(Iterable byteStrings) { - // Determine the size; - final int size; - if (!(byteStrings instanceof Collection)) { - int tempSize = 0; - for (Iterator iter = byteStrings.iterator(); iter.hasNext(); - iter.next(), ++tempSize) { - } - size = tempSize; - } else { - size = ((Collection) byteStrings).size(); - } - - if (size == 0) { - return EMPTY; - } - - return balancedConcat(byteStrings.iterator(), size); - } - - // Internal function used by copyFrom(Iterable). - // Create a balanced concatenation of the next "length" elements from the - // iterable. - private static ByteString balancedConcat(Iterator iterator, int length) { - if (length < 1) { - throw new IllegalArgumentException(String.format("length (%s) must be >= 1", length)); - } - ByteString result; - if (length == 1) { - result = iterator.next(); - } else { - int halfLength = length >>> 1; - ByteString left = balancedConcat(iterator, halfLength); - ByteString right = balancedConcat(iterator, length - halfLength); - result = left.concat(right); - } - return result; - } - - // ================================================================= - // ByteString -> byte[] - - /** - * Copies bytes into a buffer at the given offset. - * - * @param target buffer to copy into - * @param offset in the target buffer - * @throws IndexOutOfBoundsException if the offset is negative or too large - */ - public void copyTo(byte[] target, int offset) { - copyTo(target, 0, offset, size()); - } - - /** - * Copies bytes into a buffer. - * - * @param target buffer to copy into - * @param sourceOffset offset within these bytes - * @param targetOffset offset within the target buffer - * @param numberToCopy number of bytes to copy - * @throws IndexOutOfBoundsException if an offset or size is negative or too - * large - */ - public final void copyTo(byte[] target, int sourceOffset, int targetOffset, - int numberToCopy) { - checkRange(sourceOffset, sourceOffset + numberToCopy, size()); - checkRange(targetOffset, targetOffset + numberToCopy, target.length); - if (numberToCopy > 0) { - copyToInternal(target, sourceOffset, targetOffset, numberToCopy); - } - } - - /** - * Internal (package private) implementation of - * {@link #copyTo(byte[],int,int,int)}. - * It assumes that all error checking has already been performed and that - * {@code numberToCopy > 0}. - */ - protected abstract void copyToInternal(byte[] target, int sourceOffset, - int targetOffset, int numberToCopy); - - /** - * Copies bytes into a ByteBuffer. - * - * @param target ByteBuffer to copy into. - * @throws java.nio.ReadOnlyBufferException if the {@code target} is read-only - * @throws java.nio.BufferOverflowException if the {@code target}'s - * remaining() space is not large enough to hold the data. - */ - public abstract void copyTo(ByteBuffer target); - - /** - * Copies bytes to a {@code byte[]}. 
- * - * @return copied bytes - */ - public final byte[] toByteArray() { - final int size = size(); - if (size == 0) { - return Internal.EMPTY_BYTE_ARRAY; - } - byte[] result = new byte[size]; - copyToInternal(result, 0, 0, size); - return result; - } - - /** - * Writes a copy of the contents of this byte string to the specified output stream argument. - * - * @param out the output stream to which to write the data. - * @throws IOException if an I/O error occurs. - */ - public abstract void writeTo(OutputStream out) throws IOException; - - /** - * Writes a specified part of this byte string to an output stream. - * - * @param out the output stream to which to write the data. - * @param sourceOffset offset within these bytes - * @param numberToWrite number of bytes to write - * @throws IOException if an I/O error occurs. - * @throws IndexOutOfBoundsException if an offset or size is negative or too large - */ - final void writeTo(OutputStream out, int sourceOffset, int numberToWrite) - throws IOException { - checkRange(sourceOffset, sourceOffset + numberToWrite, size()); - if (numberToWrite > 0) { - writeToInternal(out, sourceOffset, numberToWrite); - } - } - - /** - * Internal version of {@link #writeTo(OutputStream,int,int)} that assumes - * all error checking has already been done. - */ - abstract void writeToInternal(OutputStream out, int sourceOffset, int numberToWrite) - throws IOException; - - /** - * Writes this {@link ByteString} to the provided {@link ByteOutput}. Calling - * this method may result in multiple operations on the target {@link ByteOutput}. - * - *
This method may expose internal backing buffers of the {@link ByteString} to the {@link - * ByteOutput} in order to avoid additional copying overhead. It would be possible for a malicious - * {@link ByteOutput} to corrupt the {@link ByteString}. Use with caution! - * - * @param byteOutput the output target to receive the bytes - * @throws IOException if an I/O error occurs - * @see UnsafeByteOperations#unsafeWriteTo(ByteString, ByteOutput) - */ - abstract void writeTo(ByteOutput byteOutput) throws IOException; - - - /** - * Constructs a read-only {@code java.nio.ByteBuffer} whose content - * is equal to the contents of this byte string. - * The result uses the same backing array as the byte string, if possible. - * - * @return wrapped bytes - */ - public abstract ByteBuffer asReadOnlyByteBuffer(); - - /** - * Constructs a list of read-only {@code java.nio.ByteBuffer} objects - * such that the concatenation of their contents is equal to the contents - * of this byte string. The result uses the same backing arrays as the - * byte string. - *
- * By returning a list, implementations of this method may be able to avoid - * copying even when there are multiple backing arrays. - * - * @return a list of wrapped bytes - */ - public abstract List asReadOnlyByteBufferList(); - - /** - * Constructs a new {@code String} by decoding the bytes using the - * specified charset. - * - * @param charsetName encode using this charset - * @return new string - * @throws UnsupportedEncodingException if charset isn't recognized - */ - public final String toString(String charsetName) - throws UnsupportedEncodingException { - try { - return toString(Charset.forName(charsetName)); - } catch (UnsupportedCharsetException e) { - UnsupportedEncodingException exception = new UnsupportedEncodingException(charsetName); - exception.initCause(e); - throw exception; - } - } - - /** - * Constructs a new {@code String} by decoding the bytes using the - * specified charset. Returns the same empty String if empty. - * - * @param charset encode using this charset - * @return new string - */ - public final String toString(Charset charset) { - return size() == 0 ? "" : toStringInternal(charset); - } - - /** - * Constructs a new {@code String} by decoding the bytes using the - * specified charset. - * - * @param charset encode using this charset - * @return new string - */ - protected abstract String toStringInternal(Charset charset); - - // ================================================================= - // UTF-8 decoding - - /** - * Constructs a new {@code String} by decoding the bytes as UTF-8. - * - * @return new string using UTF-8 encoding - */ - public final String toStringUtf8() { - return toString(Internal.UTF_8); - } - - /** - * Tells whether this {@code ByteString} represents a well-formed UTF-8 - * byte sequence, such that the original bytes can be converted to a - * String object and then round tripped back to bytes without loss. - * - *
- * <p>More precisely, returns {@code true} whenever: <pre> {@code
-   * Arrays.equals(byteString.toByteArray(),
-   *     new String(byteString.toByteArray(), "UTF-8").getBytes("UTF-8"))
-   * }</pre>
- * - *
This method returns {@code false} for "overlong" byte sequences, - * as well as for 3-byte sequences that would map to a surrogate - * character, in accordance with the restricted definition of UTF-8 - * introduced in Unicode 3.1. Note that the UTF-8 decoder included in - * Oracle's JDK has been modified to also reject "overlong" byte - * sequences, but (as of 2011) still accepts 3-byte surrogate - * character byte sequences. - * - *
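// An illustrative sketch of the isValidUtf8() contract described above; it assumes the
// shaded ByteString API as declared in this deleted file (the example class name is made up).
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

class ByteStringUtf8Example {
  public static void main(String[] args) {
    ByteString ok = ByteString.copyFromUtf8("héllo");
    // 0xC0 0x80 is the "overlong" two-byte encoding of NUL, which strict UTF-8 rejects.
    ByteString overlong = ByteString.copyFrom(new byte[] { (byte) 0xC0, (byte) 0x80 });
    System.out.println(ok.isValidUtf8());        // true
    System.out.println(overlong.isValidUtf8());  // false
  }
}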
- * <p>See the Unicode Standard,
- * Table 3-6. UTF-8 Bit Distribution,
- * Table 3-7. Well Formed UTF-8 Byte Sequences. - * - * @return whether the bytes in this {@code ByteString} are a - * well-formed UTF-8 byte sequence - */ - public abstract boolean isValidUtf8(); - - /** - * Tells whether the given byte sequence is a well-formed, malformed, or - * incomplete UTF-8 byte sequence. This method accepts and returns a partial - * state result, allowing the bytes for a complete UTF-8 byte sequence to be - * composed from multiple {@code ByteString} segments. - * - * @param state either {@code 0} (if this is the initial decoding operation) - * or the value returned from a call to a partial decoding method for the - * previous bytes - * @param offset offset of the first byte to check - * @param length number of bytes to check - * - * @return {@code -1} if the partial byte sequence is definitely malformed, - * {@code 0} if it is well-formed (no additional input needed), or, if the - * byte sequence is "incomplete", i.e. apparently terminated in the middle of - * a character, an opaque integer "state" value containing enough information - * to decode the character when passed to a subsequent invocation of a - * partial decoding method. - */ - protected abstract int partialIsValidUtf8(int state, int offset, int length); - - // ================================================================= - // equals() and hashCode() - - @Override - public abstract boolean equals(Object o); - - /** - * Base class for leaf {@link ByteString}s (i.e. non-ropes). - */ - abstract static class LeafByteString extends ByteString { - @Override - protected final int getTreeDepth() { - return 0; - } - - @Override - protected final boolean isBalanced() { - return true; - } - - - /** - * Check equality of the substring of given length of this object starting at - * zero with another {@code ByteString} substring starting at offset. - * - * @param other what to compare a substring in - * @param offset offset into other - * @param length number of bytes to compare - * @return true for equality of substrings, else false. - */ - abstract boolean equalsRange(ByteString other, int offset, int length); - } - - /** - * Compute the hashCode using the traditional algorithm from {@link - * ByteString}. - * - * @return hashCode value - */ - @Override - public final int hashCode() { - int h = hash; - - if (h == 0) { - int size = size(); - h = partialHash(size, 0, size); - if (h == 0) { - h = 1; - } - hash = h; - } - return h; - } - - // ================================================================= - // Input stream - - /** - * Creates an {@code InputStream} which can be used to read the bytes. - *
- * The {@link InputStream} returned by this method is guaranteed to be - * completely non-blocking. The method {@link InputStream#available()} - * returns the number of bytes remaining in the stream. The methods - * {@link InputStream#read(byte[])}, {@link InputStream#read(byte[],int,int)} - * and {@link InputStream#skip(long)} will read/skip as many bytes as are - * available. The method {@link InputStream#markSupported()} returns - * {@code true}. - *
- * The methods in the returned {@link InputStream} might not be - * thread safe. - * - * @return an input stream that returns the bytes of this byte string. - */ - public abstract InputStream newInput(); - - /** - * Creates a {@link CodedInputStream} which can be used to read the bytes. - * Using this is often more efficient than creating a {@link CodedInputStream} - * that wraps the result of {@link #newInput()}. - * - * @return stream based on wrapped data - */ - public abstract CodedInputStream newCodedInput(); - - // ================================================================= - // Output stream - - /** - * Creates a new {@link Output} with the given initial capacity. Call {@link - * Output#toByteString()} to create the {@code ByteString} instance. - *
- * A {@link ByteString.Output} offers the same functionality as a - * {@link ByteArrayOutputStream}, except that it returns a {@link ByteString} - * rather than a {@code byte} array. - * - * @param initialCapacity estimate of number of bytes to be written - * @return {@code OutputStream} for building a {@code ByteString} - */ - public static Output newOutput(int initialCapacity) { - return new Output(initialCapacity); - } - - /** - * Creates a new {@link Output}. Call {@link Output#toByteString()} to create - * the {@code ByteString} instance. - *
- * A {@link ByteString.Output} offers the same functionality as a - * {@link ByteArrayOutputStream}, except that it returns a {@link ByteString} - * rather than a {@code byte array}. - * - * @return {@code OutputStream} for building a {@code ByteString} - */ - public static Output newOutput() { - return new Output(CONCATENATE_BY_COPY_SIZE); - } - - /** - * Outputs to a {@code ByteString} instance. Call {@link #toByteString()} to - * create the {@code ByteString} instance. - */ - public static final class Output extends OutputStream { - // Implementation note. - // The public methods of this class must be synchronized. ByteStrings - // are guaranteed to be immutable. Without some sort of locking, it could - // be possible for one thread to call toByteSring(), while another thread - // is still modifying the underlying byte array. - - private static final byte[] EMPTY_BYTE_ARRAY = new byte[0]; - // argument passed by user, indicating initial capacity. - private final int initialCapacity; - // ByteStrings to be concatenated to create the result - private final ArrayList flushedBuffers; - // Total number of bytes in the ByteStrings of flushedBuffers - private int flushedBuffersTotalBytes; - // Current buffer to which we are writing - private byte[] buffer; - // Location in buffer[] to which we write the next byte. - private int bufferPos; - - /** - * Creates a new ByteString output stream with the specified - * initial capacity. - * - * @param initialCapacity the initial capacity of the output stream. - */ - Output(int initialCapacity) { - if (initialCapacity < 0) { - throw new IllegalArgumentException("Buffer size < 0"); - } - this.initialCapacity = initialCapacity; - this.flushedBuffers = new ArrayList(); - this.buffer = new byte[initialCapacity]; - } - - @Override - public synchronized void write(int b) { - if (bufferPos == buffer.length) { - flushFullBuffer(1); - } - buffer[bufferPos++] = (byte)b; - } - - @Override - public synchronized void write(byte[] b, int offset, int length) { - if (length <= buffer.length - bufferPos) { - // The bytes can fit into the current buffer. - System.arraycopy(b, offset, buffer, bufferPos, length); - bufferPos += length; - } else { - // Use up the current buffer - int copySize = buffer.length - bufferPos; - System.arraycopy(b, offset, buffer, bufferPos, copySize); - offset += copySize; - length -= copySize; - // Flush the buffer, and get a new buffer at least big enough to cover - // what we still need to output - flushFullBuffer(length); - System.arraycopy(b, offset, buffer, 0 /* count */, length); - bufferPos = length; - } - } - - /** - * Creates a byte string. Its size is the current size of this output - * stream and its output has been copied to it. - * - * @return the current contents of this output stream, as a byte string. - */ - public synchronized ByteString toByteString() { - flushLastBuffer(); - return ByteString.copyFrom(flushedBuffers); - } - - /** - * Implement java.util.Arrays.copyOf() for jdk 1.5. - */ - private byte[] copyArray(byte[] buffer, int length) { - byte[] result = new byte[length]; - System.arraycopy(buffer, 0, result, 0, Math.min(buffer.length, length)); - return result; - } - - /** - * Writes the complete contents of this byte array output stream to - * the specified output stream argument. - * - * @param out the output stream to which to write the data. - * @throws IOException if an I/O error occurs. 
- */ - public void writeTo(OutputStream out) throws IOException { - ByteString[] cachedFlushBuffers; - byte[] cachedBuffer; - int cachedBufferPos; - synchronized (this) { - // Copy the information we need into local variables so as to hold - // the lock for as short a time as possible. - cachedFlushBuffers = - flushedBuffers.toArray(new ByteString[flushedBuffers.size()]); - cachedBuffer = buffer; - cachedBufferPos = bufferPos; - } - for (ByteString byteString : cachedFlushBuffers) { - byteString.writeTo(out); - } - - out.write(copyArray(cachedBuffer, cachedBufferPos)); - } - - /** - * Returns the current size of the output stream. - * - * @return the current size of the output stream - */ - public synchronized int size() { - return flushedBuffersTotalBytes + bufferPos; - } - - /** - * Resets this stream, so that all currently accumulated output in the - * output stream is discarded. The output stream can be used again, - * reusing the already allocated buffer space. - */ - public synchronized void reset() { - flushedBuffers.clear(); - flushedBuffersTotalBytes = 0; - bufferPos = 0; - } - - @Override - public String toString() { - return String.format("", - Integer.toHexString(System.identityHashCode(this)), size()); - } - - /** - * Internal function used by writers. The current buffer is full, and the - * writer needs a new buffer whose size is at least the specified minimum - * size. - */ - private void flushFullBuffer(int minSize) { - flushedBuffers.add(new LiteralByteString(buffer)); - flushedBuffersTotalBytes += buffer.length; - // We want to increase our total capacity by 50%, but as a minimum, - // the new buffer should also at least be >= minSize and - // >= initial Capacity. - int newSize = Math.max(initialCapacity, - Math.max(minSize, flushedBuffersTotalBytes >>> 1)); - buffer = new byte[newSize]; - bufferPos = 0; - } - - /** - * Internal function used by {@link #toByteString()}. The current buffer may - * or may not be full, but it needs to be flushed. - */ - private void flushLastBuffer() { - if (bufferPos < buffer.length) { - if (bufferPos > 0) { - byte[] bufferCopy = copyArray(buffer, bufferPos); - flushedBuffers.add(new LiteralByteString(bufferCopy)); - } - // We reuse this buffer for further writes. - } else { - // Buffer is completely full. Huzzah. - flushedBuffers.add(new LiteralByteString(buffer)); - // 99% of the time, we're not going to use this OutputStream again. - // We set buffer to an empty byte stream so that we're handling this - // case without wasting space. In the rare case that more writes - // *do* occur, this empty buffer will be flushed and an appropriately - // sized new buffer will be created. - buffer = EMPTY_BYTE_ARRAY; - } - flushedBuffersTotalBytes += bufferPos; - bufferPos = 0; - } - } - - /** - * Constructs a new {@code ByteString} builder, which allows you to - * efficiently construct a {@code ByteString} by writing to a {@link - * CodedOutputStream}. Using this is much more efficient than calling {@code - * newOutput()} and wrapping that in a {@code CodedOutputStream}. - * - *
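// An illustrative sketch of the ByteString.Output stream shown above (a
// ByteArrayOutputStream-like sink that yields a ByteString); it assumes the API as declared
// in this deleted file (the example class name is made up).
import java.io.IOException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

class ByteStringOutputExample {
  public static void main(String[] args) throws IOException {
    ByteString.Output out = ByteString.newOutput(16);
    out.write(new byte[] { 1, 2, 3 });  // buffered; flushed into immutable chunks as it grows
    out.write(4);
    ByteString result = out.toByteString();
    System.out.println(result.size());  // 4
  }
}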
This is package-private because it's a somewhat confusing interface. - * Users can call {@link Message#toByteString()} instead of calling this - * directly. - * - * @param size The target byte size of the {@code ByteString}. You must write - * exactly this many bytes before building the result. - * @return the builder - */ - static CodedBuilder newCodedBuilder(int size) { - return new CodedBuilder(size); - } - - /** See {@link ByteString#newCodedBuilder(int)}. */ - static final class CodedBuilder { - private final CodedOutputStream output; - private final byte[] buffer; - - private CodedBuilder(int size) { - buffer = new byte[size]; - output = CodedOutputStream.newInstance(buffer); - } - - public ByteString build() { - output.checkNoSpaceLeft(); - - // We can be confident that the CodedOutputStream will not modify the - // underlying bytes anymore because it already wrote all of them. So, - // no need to make a copy. - return new LiteralByteString(buffer); - } - - public CodedOutputStream getCodedOutput() { - return output; - } - } - - // ================================================================= - // Methods {@link RopeByteString} needs on instances, which aren't part of the - // public API. - - /** - * Return the depth of the tree representing this {@code ByteString}, if any, - * whose root is this node. If this is a leaf node, return 0. - * - * @return tree depth or zero - */ - protected abstract int getTreeDepth(); - - /** - * Return {@code true} if this ByteString is literal (a leaf node) or a - * flat-enough tree in the sense of {@link RopeByteString}. - * - * @return true if the tree is flat enough - */ - protected abstract boolean isBalanced(); - - /** - * Return the cached hash code if available. - * - * @return value of cached hash code or 0 if not computed yet - */ - protected final int peekCachedHashCode() { - return hash; - } - - /** - * Compute the hash across the value bytes starting with the given hash, and - * return the result. This is used to compute the hash across strings - * represented as a set of pieces by allowing the hash computation to be - * continued from piece to piece. - * - * @param h starting hash value - * @param offset offset into this value to start looking at data values - * @param length number of data values to include in the hash computation - * @return ending hash value - */ - protected abstract int partialHash(int h, int offset, int length); - - /** - * Checks that the given index falls within the specified array size. - * - * @param index the index position to be tested - * @param size the length of the array - * @throws IndexOutOfBoundsException if the index does not fall within the array. - */ - static void checkIndex(int index, int size) { - if ((index | (size - (index + 1))) < 0) { - if (index < 0) { - throw new ArrayIndexOutOfBoundsException("Index < 0: " + index); - } - throw new ArrayIndexOutOfBoundsException("Index > length: " + index + ", " + size); - } - } - - /** - * Checks that the given range falls within the bounds of an array - * - * @param startIndex the start index of the range (inclusive) - * @param endIndex the end index of the range (exclusive) - * @param size the size of the array. - * @return the length of the range. - * @throws IndexOutOfBoundsException some or all of the range falls outside of the array. 
- */ - static int checkRange(int startIndex, int endIndex, int size) { - final int length = endIndex - startIndex; - if ((startIndex | endIndex | length | (size - endIndex)) < 0) { - if (startIndex < 0) { - throw new IndexOutOfBoundsException("Beginning index: " + startIndex + " < 0"); - } - if (endIndex < startIndex) { - throw new IndexOutOfBoundsException( - "Beginning index larger than ending index: " + startIndex + ", " + endIndex); - } - // endIndex >= size - throw new IndexOutOfBoundsException("End index: " + endIndex + " >= " + size); - } - return length; - } - - @Override - public final String toString() { - return String.format("", - Integer.toHexString(System.identityHashCode(this)), size()); - } - - /** - * This class implements a {@link org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString} backed by a - * single array of bytes, contiguous in memory. It supports substring by - * pointing to only a sub-range of the underlying byte array, meaning that a - * substring will reference the full byte-array of the string it's made from, - * exactly as with {@link String}. - * - * @author carlanton@google.com (Carl Haverl) - */ - // Keep this class private to avoid deadlocks in classloading across threads as ByteString's - // static initializer loads LiteralByteString and another thread loads LiteralByteString. - private static class LiteralByteString extends ByteString.LeafByteString { - private static final long serialVersionUID = 1L; - - protected final byte[] bytes; - - /** - * Creates a {@code LiteralByteString} backed by the given array, without - * copying. - * - * @param bytes array to wrap - */ - LiteralByteString(byte[] bytes) { - this.bytes = bytes; - } - - @Override - public byte byteAt(int index) { - // Unlike most methods in this class, this one is a direct implementation - // ignoring the potential offset because we need to do range-checking in the - // substring case anyway. - return bytes[index]; - } - - @Override - public int size() { - return bytes.length; - } - - // ================================================================= - // ByteString -> substring - - @Override - public final ByteString substring(int beginIndex, int endIndex) { - final int length = checkRange(beginIndex, endIndex, size()); - - if (length == 0) { - return ByteString.EMPTY; - } - - return new BoundedByteString(bytes, getOffsetIntoBytes() + beginIndex, length); - } - - // ================================================================= - // ByteString -> byte[] - - @Override - protected void copyToInternal( - byte[] target, int sourceOffset, int targetOffset, int numberToCopy) { - // Optimized form, not for subclasses, since we don't call - // getOffsetIntoBytes() or check the 'numberToCopy' parameter. - // TODO(nathanmittler): Is not calling getOffsetIntoBytes really saving that much? 
- System.arraycopy(bytes, sourceOffset, target, targetOffset, numberToCopy); - } - - @Override - public final void copyTo(ByteBuffer target) { - target.put(bytes, getOffsetIntoBytes(), size()); // Copies bytes - } - - @Override - public final ByteBuffer asReadOnlyByteBuffer() { - return ByteBuffer.wrap(bytes, getOffsetIntoBytes(), size()).asReadOnlyBuffer(); - } - - @Override - public final List asReadOnlyByteBufferList() { - return Collections.singletonList(asReadOnlyByteBuffer()); - } - - @Override - public final void writeTo(OutputStream outputStream) throws IOException { - outputStream.write(toByteArray()); - } - - @Override - final void writeToInternal(OutputStream outputStream, int sourceOffset, int numberToWrite) - throws IOException { - outputStream.write(bytes, getOffsetIntoBytes() + sourceOffset, numberToWrite); - } - - @Override - final void writeTo(ByteOutput output) throws IOException { - output.writeLazy(bytes, getOffsetIntoBytes(), size()); - } - - @Override - protected final String toStringInternal(Charset charset) { - return new String(bytes, getOffsetIntoBytes(), size(), charset); - } - - // ================================================================= - // UTF-8 decoding - - @Override - public final boolean isValidUtf8() { - int offset = getOffsetIntoBytes(); - return Utf8.isValidUtf8(bytes, offset, offset + size()); - } - - @Override - protected final int partialIsValidUtf8(int state, int offset, int length) { - int index = getOffsetIntoBytes() + offset; - return Utf8.partialIsValidUtf8(state, bytes, index, index + length); - } - - // ================================================================= - // equals() and hashCode() - - @Override - public final boolean equals(Object other) { - if (other == this) { - return true; - } - if (!(other instanceof ByteString)) { - return false; - } - - if (size() != ((ByteString) other).size()) { - return false; - } - if (size() == 0) { - return true; - } - - if (other instanceof LiteralByteString) { - LiteralByteString otherAsLiteral = (LiteralByteString) other; - // If we know the hash codes and they are not equal, we know the byte - // strings are not equal. - int thisHash = peekCachedHashCode(); - int thatHash = otherAsLiteral.peekCachedHashCode(); - if (thisHash != 0 && thatHash != 0 && thisHash != thatHash) { - return false; - } - - return equalsRange((LiteralByteString) other, 0, size()); - } else { - // RopeByteString and NioByteString. - return other.equals(this); - } - } - - /** - * Check equality of the substring of given length of this object starting at - * zero with another {@code LiteralByteString} substring starting at offset. - * - * @param other what to compare a substring in - * @param offset offset into other - * @param length number of bytes to compare - * @return true for equality of substrings, else false. 
- */ - @Override - final boolean equalsRange(ByteString other, int offset, int length) { - if (length > other.size()) { - throw new IllegalArgumentException("Length too large: " + length + size()); - } - if (offset + length > other.size()) { - throw new IllegalArgumentException( - "Ran off end of other: " + offset + ", " + length + ", " + other.size()); - } - - if (other instanceof LiteralByteString) { - LiteralByteString lbsOther = (LiteralByteString) other; - byte[] thisBytes = bytes; - byte[] otherBytes = lbsOther.bytes; - int thisLimit = getOffsetIntoBytes() + length; - for ( - int thisIndex = getOffsetIntoBytes(), - otherIndex = lbsOther.getOffsetIntoBytes() + offset; - (thisIndex < thisLimit); ++thisIndex, ++otherIndex) { - if (thisBytes[thisIndex] != otherBytes[otherIndex]) { - return false; - } - } - return true; - } - - return other.substring(offset, offset + length).equals(substring(0, length)); - } - - @Override - protected final int partialHash(int h, int offset, int length) { - return Internal.partialHash(h, bytes, getOffsetIntoBytes() + offset, length); - } - - // ================================================================= - // Input stream - - @Override - public final InputStream newInput() { - return new ByteArrayInputStream(bytes, getOffsetIntoBytes(), size()); // No copy - } - - @Override - public final CodedInputStream newCodedInput() { - // We trust CodedInputStream not to modify the bytes, or to give anyone - // else access to them. - return CodedInputStream.newInstance( - bytes, getOffsetIntoBytes(), size(), true /* bufferIsImmutable */); - } - - // ================================================================= - // Internal methods - - /** - * Offset into {@code bytes[]} to use, non-zero for substrings. - * - * @return always 0 for this class - */ - protected int getOffsetIntoBytes() { - return 0; - } - } - - /** - * This class is used to represent the substring of a {@link ByteString} over a - * single byte array. In terms of the public API of {@link ByteString}, you end - * up here by calling {@link ByteString#copyFrom(byte[])} followed by {@link - * ByteString#substring(int, int)}. - * - *
This class contains most of the overhead involved in creating a substring - * from a {@link LiteralByteString}. The overhead involves some range-checking - * and two extra fields. - * - * @author carlanton@google.com (Carl Haverl) - */ - // Keep this class private to avoid deadlocks in classloading across threads as ByteString's - // static initializer loads LiteralByteString and another thread loads BoundedByteString. - private static final class BoundedByteString extends LiteralByteString { - - private final int bytesOffset; - private final int bytesLength; - - /** - * Creates a {@code BoundedByteString} backed by the sub-range of given array, - * without copying. - * - * @param bytes array to wrap - * @param offset index to first byte to use in bytes - * @param length number of bytes to use from bytes - * @throws IllegalArgumentException if {@code offset < 0}, {@code length < 0}, - * or if {@code offset + length > - * bytes.length}. - */ - BoundedByteString(byte[] bytes, int offset, int length) { - super(bytes); - checkRange(offset, offset + length, bytes.length); - - this.bytesOffset = offset; - this.bytesLength = length; - } - - /** - * Gets the byte at the given index. - * Throws {@link ArrayIndexOutOfBoundsException} - * for backwards-compatibility reasons although it would more properly be - * {@link IndexOutOfBoundsException}. - * - * @param index index of byte - * @return the value - * @throws ArrayIndexOutOfBoundsException {@code index} is < 0 or >= size - */ - @Override - public byte byteAt(int index) { - // We must check the index ourselves as we cannot rely on Java array index - // checking for substrings. - checkIndex(index, size()); - return bytes[bytesOffset + index]; - } - - @Override - public int size() { - return bytesLength; - } - - @Override - protected int getOffsetIntoBytes() { - return bytesOffset; - } - - // ================================================================= - // ByteString -> byte[] - - @Override - protected void copyToInternal(byte[] target, int sourceOffset, int targetOffset, - int numberToCopy) { - System.arraycopy(bytes, getOffsetIntoBytes() + sourceOffset, target, - targetOffset, numberToCopy); - } - - // ================================================================= - // Serializable - - private static final long serialVersionUID = 1L; - - Object writeReplace() { - return ByteString.wrap(toByteArray()); - } - - private void readObject(@SuppressWarnings("unused") ObjectInputStream in) throws IOException { - throw new InvalidObjectException( - "BoundedByteStream instances are not to be serialized directly"); - } - } -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BytesValue.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BytesValue.java deleted file mode 100644 index ecfbcc16cba..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BytesValue.java +++ /dev/null @@ -1,454 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/wrappers.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -/** - *
<pre>
- * Wrapper message for `bytes`.
- * The JSON representation for `BytesValue` is JSON string.
- * </pre>
- * - * Protobuf type {@code google.protobuf.BytesValue} - */ -public final class BytesValue extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:google.protobuf.BytesValue) - BytesValueOrBuilder { - // Use BytesValue.newBuilder() to construct. - private BytesValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private BytesValue() { - value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; - } - - @java.lang.Override - public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private BytesValue( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - - value_ = input.readBytes(); - break; - } - } - } - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - makeExtensionsImmutable(); - } - } - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BytesValue_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BytesValue_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue.Builder.class); - } - - public static final int VALUE_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_; - /** - *
-   * The bytes value.
-   * </pre>
- * - * bytes value = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() { - return value_; - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!value_.isEmpty()) { - output.writeBytes(1, value_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!value_.isEmpty()) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeBytesSize(1, value_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue) obj; - - boolean result = true; - result = result && getValue() - .equals(other.getValue()); - return result; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parseFrom(byte[] data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parseFrom( - byte[] data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parseFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parseFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parseDelimitedFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - *
-   * Wrapper message for `bytes`.
-   * The JSON representation for `BytesValue` is JSON string.
-   * </pre>
- * - * Protobuf type {@code google.protobuf.BytesValue} - */ - public static final class Builder extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:google.protobuf.BytesValue) - org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValueOrBuilder { - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BytesValue_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BytesValue_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; - - return this; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.WrappersProto.internal_static_google_protobuf_BytesValue_descriptor; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue getDefaultInstanceForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue build() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue buildPartial() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue(this); - result.value_ = value_; - onBuilt(); - return result; - } - - public Builder clone() { - return (Builder) super.clone(); - } - public Builder setField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.setField(field, value); - } - public Builder clearField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { - return (Builder) super.clearField(field); - } - public Builder clearOneof( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return (Builder) super.clearOneof(oneof); - } - public Builder setRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { - return (Builder) super.setRepeatedField(field, index, value); - } - public Builder 
addRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.addRepeatedField(field, value); - } - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue) { - return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue other) { - if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue.getDefaultInstance()) return this; - if (other.getValue() != org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY) { - setValue(other.getValue()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY; - /** - *
-     * The bytes value.
-     * </pre>
- * - * bytes value = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() { - return value_; - } - /** - *
-     * The bytes value.
-     * </pre>
- * - * bytes value = 1; - */ - public Builder setValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - - value_ = value; - onChanged(); - return this; - } - /** - *
-     * The bytes value.
-     * </pre>
- * - * bytes value = 1; - */ - public Builder clearValue() { - - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - public final Builder setUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.protobuf.BytesValue) - } - - // @@protoc_insertion_point(class_scope:google.protobuf.BytesValue) - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue(); - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser - PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { - public BytesValue parsePartialFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return new BytesValue(input, extensionRegistry); - } - }; - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BytesValueOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BytesValueOrBuilder.java deleted file mode 100644 index 3f90289ed62..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BytesValueOrBuilder.java +++ /dev/null @@ -1,18 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/protobuf/wrappers.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -public interface BytesValueOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.protobuf.BytesValue) - org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { - - /** - *
-   * The bytes value.
-   * </pre>
- * - * bytes value = 1; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue(); -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java deleted file mode 100644 index 23cc1a4d882..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java +++ /dev/null @@ -1,3549 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EMPTY_BYTE_ARRAY; -import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EMPTY_BYTE_BUFFER; -import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.UTF_8; -import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.checkNotNull; -import static org.apache.hadoop.hbase.shaded.com.google.protobuf.WireFormat.FIXED_32_SIZE; -import static org.apache.hadoop.hbase.shaded.com.google.protobuf.WireFormat.FIXED_64_SIZE; -import static org.apache.hadoop.hbase.shaded.com.google.protobuf.WireFormat.MAX_VARINT_SIZE; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -/** - * Reads and decodes protocol message fields. - * - *
<p>
This class contains two kinds of methods: methods that read specific protocol message - * constructs and field types (e.g. {@link #readTag()} and {@link #readInt32()}) and methods that - * read low-level values (e.g. {@link #readRawVarint32()} and {@link #readRawBytes}). If you are - * reading encoded protocol messages, you should use the former methods, but if you are reading some - * other format of your own design, use the latter. - * - * @author kenton@google.com Kenton Varda - */ -public abstract class CodedInputStream { - private static final int DEFAULT_BUFFER_SIZE = 4096; - private static final int DEFAULT_RECURSION_LIMIT = 100; - // Integer.MAX_VALUE == 0x7FFFFFF == INT_MAX from limits.h - private static final int DEFAULT_SIZE_LIMIT = Integer.MAX_VALUE; - - /** Visible for subclasses. See setRecursionLimit() */ - int recursionDepth; - - int recursionLimit = DEFAULT_RECURSION_LIMIT; - - /** Visible for subclasses. See setSizeLimit() */ - int sizeLimit = DEFAULT_SIZE_LIMIT; - - /** Create a new CodedInputStream wrapping the given InputStream. */ - public static CodedInputStream newInstance(final InputStream input) { - return newInstance(input, DEFAULT_BUFFER_SIZE); - } - - /** Create a new CodedInputStream wrapping the given InputStream. */ - static CodedInputStream newInstance(final InputStream input, int bufferSize) { - if (input == null) { - // TODO(nathanmittler): Ideally we should throw here. This is done for backward compatibility. - return newInstance(EMPTY_BYTE_ARRAY); - } - return new StreamDecoder(input, bufferSize); - } - - /** Create a new CodedInputStream wrapping the given byte array. */ - public static CodedInputStream newInstance(final byte[] buf) { - return newInstance(buf, 0, buf.length); - } - - /** Create a new CodedInputStream wrapping the given byte array slice. */ - public static CodedInputStream newInstance(final byte[] buf, final int off, final int len) { - return newInstance(buf, off, len, false /* bufferIsImmutable */); - } - - /** Create a new CodedInputStream wrapping the given byte array slice. */ - static CodedInputStream newInstance( - final byte[] buf, final int off, final int len, final boolean bufferIsImmutable) { - ArrayDecoder result = new ArrayDecoder(buf, off, len, bufferIsImmutable); - try { - // Some uses of CodedInputStream can be more efficient if they know - // exactly how many bytes are available. By pushing the end point of the - // buffer as a limit, we allow them to get this information via - // getBytesUntilLimit(). Pushing a limit that we know is at the end of - // the stream can never hurt, since we can never past that point anyway. - result.pushLimit(len); - } catch (InvalidProtocolBufferException ex) { - // The only reason pushLimit() might throw an exception here is if len - // is negative. Normally pushLimit()'s parameter comes directly off the - // wire, so it's important to catch exceptions in case of corrupt or - // malicious data. However, in this case, we expect that len is not a - // user-supplied value, so we can assume that it being negative indicates - // a programming error. Therefore, throwing an unchecked exception is - // appropriate. - throw new IllegalArgumentException(ex); - } - return result; - } - - /** - * Create a new CodedInputStream wrapping the given ByteBuffer. The data starting from the - * ByteBuffer's current position to its limit will be read. 
The returned CodedInputStream may or - * may not share the underlying data in the ByteBuffer, therefore the ByteBuffer cannot be changed - * while the CodedInputStream is in use. Note that the ByteBuffer's position won't be changed by - * this function. Concurrent calls with the same ByteBuffer object are safe if no other thread is - * trying to alter the ByteBuffer's status. - */ - public static CodedInputStream newInstance(ByteBuffer buf) { - return newInstance(buf, false /* bufferIsImmutable */); - } - - /** Create a new CodedInputStream wrapping the given buffer. */ - static CodedInputStream newInstance(ByteBuffer buf, boolean bufferIsImmutable) { - if (buf.hasArray()) { - return newInstance( - buf.array(), buf.arrayOffset() + buf.position(), buf.remaining(), bufferIsImmutable); - } - - if (buf.isDirect() && UnsafeDirectNioDecoder.isSupported()) { - return new UnsafeDirectNioDecoder(buf, bufferIsImmutable); - } - - // The buffer is non-direct and does not expose the underlying array. Using the ByteBuffer API - // to access individual bytes is very slow, so just copy the buffer to an array. - // TODO(nathanmittler): Re-evaluate with Java 9 - byte[] buffer = new byte[buf.remaining()]; - buf.duplicate().get(buffer); - return newInstance(buffer, 0, buffer.length, true); - } - - /** Create a new CodedInputStream wrapping the given {@link ByteInput}. */ - static CodedInputStream newInstance(ByteInput buf, int off, int len, boolean bufferIsImmutable) { - return new ByteInputDecoder(buf, off, len, bufferIsImmutable); - } - - /** Disable construction/inheritance outside of this class. */ - private CodedInputStream() {} - - // ----------------------------------------------------------------- - - /** - * Attempt to read a field tag, returning zero if we have reached EOF. Protocol message parsers - * use this to read tags, since a protocol message may legally end wherever a tag occurs, and zero - * is not a valid tag number. - */ - public abstract int readTag() throws IOException; - - /** - * Verifies that the last call to readTag() returned the given tag value. This is used to verify - * that a nested group ended with the correct end tag. - * - * @throws InvalidProtocolBufferException {@code value} does not match the last tag. - */ - public abstract void checkLastTagWas(final int value) throws InvalidProtocolBufferException; - - public abstract int getLastTag(); - - /** - * Reads and discards a single field, given its tag value. - * - * @return {@code false} if the tag is an endgroup tag, in which case nothing is skipped. - * Otherwise, returns {@code true}. - */ - public abstract boolean skipField(final int tag) throws IOException; - - /** - * Reads a single field and writes it to output in wire format, given its tag value. - * - * @return {@code false} if the tag is an endgroup tag, in which case nothing is skipped. - * Otherwise, returns {@code true}. - * @deprecated use {@code UnknownFieldSet} or {@code UnknownFieldSetLite} to skip to an output - * stream. - */ - @Deprecated - public abstract boolean skipField(final int tag, final CodedOutputStream output) - throws IOException; - - /** - * Reads and discards an entire message. This will read either until EOF or until an endgroup tag, - * whichever comes first. - */ - public abstract void skipMessage() throws IOException; - - /** - * Reads an entire message and writes it to output in wire format. This will read either until EOF - * or until an endgroup tag, whichever comes first. 
- */ - public abstract void skipMessage(CodedOutputStream output) throws IOException; - - - // ----------------------------------------------------------------- - - /** Read a {@code double} field value from the stream. */ - public abstract double readDouble() throws IOException; - - /** Read a {@code float} field value from the stream. */ - public abstract float readFloat() throws IOException; - - /** Read a {@code uint64} field value from the stream. */ - public abstract long readUInt64() throws IOException; - - /** Read an {@code int64} field value from the stream. */ - public abstract long readInt64() throws IOException; - - /** Read an {@code int32} field value from the stream. */ - public abstract int readInt32() throws IOException; - - /** Read a {@code fixed64} field value from the stream. */ - public abstract long readFixed64() throws IOException; - - /** Read a {@code fixed32} field value from the stream. */ - public abstract int readFixed32() throws IOException; - - /** Read a {@code bool} field value from the stream. */ - public abstract boolean readBool() throws IOException; - - /** - * Read a {@code string} field value from the stream. If the stream contains malformed UTF-8, - * replace the offending bytes with the standard UTF-8 replacement character. - */ - public abstract String readString() throws IOException; - - /** - * Read a {@code string} field value from the stream. If the stream contains malformed UTF-8, - * throw exception {@link InvalidProtocolBufferException}. - */ - public abstract String readStringRequireUtf8() throws IOException; - - /** Read a {@code group} field value from the stream. */ - public abstract void readGroup( - final int fieldNumber, - final MessageLite.Builder builder, - final ExtensionRegistryLite extensionRegistry) - throws IOException; - - - /** Read a {@code group} field value from the stream. */ - public abstract T readGroup( - final int fieldNumber, final Parser parser, final ExtensionRegistryLite extensionRegistry) - throws IOException; - - /** - * Reads a {@code group} field value from the stream and merges it into the given {@link - * UnknownFieldSet}. - * - * @deprecated UnknownFieldSet.Builder now implements MessageLite.Builder, so you can just call - * {@link #readGroup}. - */ - @Deprecated - public abstract void readUnknownGroup(final int fieldNumber, final MessageLite.Builder builder) - throws IOException; - - /** Read an embedded message field value from the stream. */ - public abstract void readMessage( - final MessageLite.Builder builder, final ExtensionRegistryLite extensionRegistry) - throws IOException; - - - /** Read an embedded message field value from the stream. */ - public abstract T readMessage( - final Parser parser, final ExtensionRegistryLite extensionRegistry) throws IOException; - - /** Read a {@code bytes} field value from the stream. */ - public abstract ByteString readBytes() throws IOException; - - /** Read a {@code bytes} field value from the stream. */ - public abstract byte[] readByteArray() throws IOException; - - /** Read a {@code bytes} field value from the stream. */ - public abstract ByteBuffer readByteBuffer() throws IOException; - - /** Read a {@code uint32} field value from the stream. */ - public abstract int readUInt32() throws IOException; - - /** - * Read an enum field value from the stream. Caller is responsible for converting the numeric - * value to an actual enum. - */ - public abstract int readEnum() throws IOException; - - /** Read an {@code sfixed32} field value from the stream. 
*/ - public abstract int readSFixed32() throws IOException; - - /** Read an {@code sfixed64} field value from the stream. */ - public abstract long readSFixed64() throws IOException; - - /** Read an {@code sint32} field value from the stream. */ - public abstract int readSInt32() throws IOException; - - /** Read an {@code sint64} field value from the stream. */ - public abstract long readSInt64() throws IOException; - - // ================================================================= - - /** Read a raw Varint from the stream. If larger than 32 bits, discard the upper bits. */ - public abstract int readRawVarint32() throws IOException; - - /** Read a raw Varint from the stream. */ - public abstract long readRawVarint64() throws IOException; - - /** Variant of readRawVarint64 for when uncomfortably close to the limit. */ - /* Visible for testing */ - abstract long readRawVarint64SlowPath() throws IOException; - - /** Read a 32-bit little-endian integer from the stream. */ - public abstract int readRawLittleEndian32() throws IOException; - - /** Read a 64-bit little-endian integer from the stream. */ - public abstract long readRawLittleEndian64() throws IOException; - - // ----------------------------------------------------------------- - - /** - * Enables {@link ByteString} aliasing of the underlying buffer, trading off on buffer pinning for - * data copies. Only valid for buffer-backed streams. - */ - public abstract void enableAliasing(boolean enabled); - - /** - * Set the maximum message recursion depth. In order to prevent malicious messages from causing - * stack overflows, {@code CodedInputStream} limits how deeply messages may be nested. The default - * limit is 64. - * - * @return the old limit. - */ - public final int setRecursionLimit(final int limit) { - if (limit < 0) { - throw new IllegalArgumentException("Recursion limit cannot be negative: " + limit); - } - final int oldLimit = recursionLimit; - recursionLimit = limit; - return oldLimit; - } - - /** - * Only valid for {@link InputStream}-backed streams. - * - *
<p>
Set the maximum message size. In order to prevent malicious messages from exhausting memory - * or causing integer overflows, {@code CodedInputStream} limits how large a message may be. The - * default limit is 64MB. You should set this limit as small as you can without harming your app's - * functionality. Note that size limits only apply when reading from an {@code InputStream}, not - * when constructed around a raw byte array (nor with {@link ByteString#newCodedInput}). - * - *
<p>
If you want to read several messages from a single CodedInputStream, you could call {@link - * #resetSizeCounter()} after each one to avoid hitting the size limit. - * - * @return the old limit. - */ - public final int setSizeLimit(final int limit) { - if (limit < 0) { - throw new IllegalArgumentException("Size limit cannot be negative: " + limit); - } - final int oldLimit = sizeLimit; - sizeLimit = limit; - return oldLimit; - } - - /** - * Resets the current size counter to zero (see {@link #setSizeLimit(int)}). Only valid for {@link - * InputStream}-backed streams. - */ - public abstract void resetSizeCounter(); - - /** - * Sets {@code currentLimit} to (current position) + {@code byteLimit}. This is called when - * descending into a length-delimited embedded message. - * - *
<p>
Note that {@code pushLimit()} does NOT affect how many bytes the {@code CodedInputStream} - * reads from an underlying {@code InputStream} when refreshing its buffer. If you need to prevent - * reading past a certain point in the underlying {@code InputStream} (e.g. because you expect it - * to contain more data after the end of the message which you need to handle differently) then - * you must place a wrapper around your {@code InputStream} which limits the amount of data that - * can be read from it. - * - * @return the old limit. - */ - public abstract int pushLimit(int byteLimit) throws InvalidProtocolBufferException; - - /** - * Discards the current limit, returning to the previous limit. - * - * @param oldLimit The old limit, as returned by {@code pushLimit}. - */ - public abstract void popLimit(final int oldLimit); - - /** - * Returns the number of bytes to be read before the current limit. If no limit is set, returns - * -1. - */ - public abstract int getBytesUntilLimit(); - - /** - * Returns true if the stream has reached the end of the input. This is the case if either the end - * of the underlying input source has been reached or if the stream has reached a limit created - * using {@link #pushLimit(int)}. - */ - public abstract boolean isAtEnd() throws IOException; - - /** - * The total bytes read up to the current position. Calling {@link #resetSizeCounter()} resets - * this value to zero. - */ - public abstract int getTotalBytesRead(); - - /** - * Read one byte from the input. - * - * @throws InvalidProtocolBufferException The end of the stream or the current limit was reached. - */ - public abstract byte readRawByte() throws IOException; - - /** - * Read a fixed size of bytes from the input. - * - * @throws InvalidProtocolBufferException The end of the stream or the current limit was reached. - */ - public abstract byte[] readRawBytes(final int size) throws IOException; - - /** - * Reads and discards {@code size} bytes. - * - * @throws InvalidProtocolBufferException The end of the stream or the current limit was reached. - */ - public abstract void skipRawBytes(final int size) throws IOException; - - /** - * Decode a ZigZag-encoded 32-bit value. ZigZag encodes signed integers into values that can be - * efficiently encoded with varint. (Otherwise, negative values must be sign-extended to 64 bits - * to be varint encoded, thus always taking 10 bytes on the wire.) - * - * @param n An unsigned 32-bit integer, stored in a signed int because Java has no explicit - * unsigned support. - * @return A signed 32-bit integer. - */ - public static int decodeZigZag32(final int n) { - return (n >>> 1) ^ -(n & 1); - } - - /** - * Decode a ZigZag-encoded 64-bit value. ZigZag encodes signed integers into values that can be - * efficiently encoded with varint. (Otherwise, negative values must be sign-extended to 64 bits - * to be varint encoded, thus always taking 10 bytes on the wire.) - * - * @param n An unsigned 64-bit integer, stored in a signed int because Java has no explicit - * unsigned support. - * @return A signed 64-bit integer. - */ - public static long decodeZigZag64(final long n) { - return (n >>> 1) ^ -(n & 1); - } - - /** - * Like {@link #readRawVarint32(InputStream)}, but expects that the caller has already read one - * byte. This allows the caller to determine if EOF has been reached before attempting to read. 
- */ - public static int readRawVarint32(final int firstByte, final InputStream input) - throws IOException { - if ((firstByte & 0x80) == 0) { - return firstByte; - } - - int result = firstByte & 0x7f; - int offset = 7; - for (; offset < 32; offset += 7) { - final int b = input.read(); - if (b == -1) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - result |= (b & 0x7f) << offset; - if ((b & 0x80) == 0) { - return result; - } - } - // Keep reading up to 64 bits. - for (; offset < 64; offset += 7) { - final int b = input.read(); - if (b == -1) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - if ((b & 0x80) == 0) { - return result; - } - } - throw InvalidProtocolBufferException.malformedVarint(); - } - - /** - * Reads a varint from the input one byte at a time, so that it does not read any bytes after the - * end of the varint. If you simply wrapped the stream in a CodedInputStream and used {@link - * #readRawVarint32(InputStream)} then you would probably end up reading past the end of the - * varint since CodedInputStream buffers its input. - */ - static int readRawVarint32(final InputStream input) throws IOException { - final int firstByte = input.read(); - if (firstByte == -1) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - return readRawVarint32(firstByte, input); - } - - /** A {@link CodedInputStream} implementation that uses a backing array as the input. */ - private static final class ArrayDecoder extends CodedInputStream { - private final byte[] buffer; - private final boolean immutable; - private int limit; - private int bufferSizeAfterLimit; - private int pos; - private int startPos; - private int lastTag; - private boolean enableAliasing; - - /** The absolute position of the end of the current message. */ - private int currentLimit = Integer.MAX_VALUE; - - private ArrayDecoder(final byte[] buffer, final int offset, final int len, boolean immutable) { - this.buffer = buffer; - limit = offset + len; - pos = offset; - startPos = pos; - this.immutable = immutable; - } - - @Override - public int readTag() throws IOException { - if (isAtEnd()) { - lastTag = 0; - return 0; - } - - lastTag = readRawVarint32(); - if (WireFormat.getTagFieldNumber(lastTag) == 0) { - // If we actually read zero (or any tag number corresponding to field - // number zero), that's not a valid tag. 
- throw InvalidProtocolBufferException.invalidTag(); - } - return lastTag; - } - - @Override - public void checkLastTagWas(final int value) throws InvalidProtocolBufferException { - if (lastTag != value) { - throw InvalidProtocolBufferException.invalidEndTag(); - } - } - - @Override - public int getLastTag() { - return lastTag; - } - - @Override - public boolean skipField(final int tag) throws IOException { - switch (WireFormat.getTagWireType(tag)) { - case WireFormat.WIRETYPE_VARINT: - skipRawVarint(); - return true; - case WireFormat.WIRETYPE_FIXED64: - skipRawBytes(FIXED_64_SIZE); - return true; - case WireFormat.WIRETYPE_LENGTH_DELIMITED: - skipRawBytes(readRawVarint32()); - return true; - case WireFormat.WIRETYPE_START_GROUP: - skipMessage(); - checkLastTagWas( - WireFormat.makeTag(WireFormat.getTagFieldNumber(tag), WireFormat.WIRETYPE_END_GROUP)); - return true; - case WireFormat.WIRETYPE_END_GROUP: - return false; - case WireFormat.WIRETYPE_FIXED32: - skipRawBytes(FIXED_32_SIZE); - return true; - default: - throw InvalidProtocolBufferException.invalidWireType(); - } - } - - @Override - public boolean skipField(final int tag, final CodedOutputStream output) throws IOException { - switch (WireFormat.getTagWireType(tag)) { - case WireFormat.WIRETYPE_VARINT: - { - long value = readInt64(); - output.writeRawVarint32(tag); - output.writeUInt64NoTag(value); - return true; - } - case WireFormat.WIRETYPE_FIXED64: - { - long value = readRawLittleEndian64(); - output.writeRawVarint32(tag); - output.writeFixed64NoTag(value); - return true; - } - case WireFormat.WIRETYPE_LENGTH_DELIMITED: - { - ByteString value = readBytes(); - output.writeRawVarint32(tag); - output.writeBytesNoTag(value); - return true; - } - case WireFormat.WIRETYPE_START_GROUP: - { - output.writeRawVarint32(tag); - skipMessage(output); - int endtag = - WireFormat.makeTag( - WireFormat.getTagFieldNumber(tag), WireFormat.WIRETYPE_END_GROUP); - checkLastTagWas(endtag); - output.writeRawVarint32(endtag); - return true; - } - case WireFormat.WIRETYPE_END_GROUP: - { - return false; - } - case WireFormat.WIRETYPE_FIXED32: - { - int value = readRawLittleEndian32(); - output.writeRawVarint32(tag); - output.writeFixed32NoTag(value); - return true; - } - default: - throw InvalidProtocolBufferException.invalidWireType(); - } - } - - @Override - public void skipMessage() throws IOException { - while (true) { - final int tag = readTag(); - if (tag == 0 || !skipField(tag)) { - return; - } - } - } - - @Override - public void skipMessage(CodedOutputStream output) throws IOException { - while (true) { - final int tag = readTag(); - if (tag == 0 || !skipField(tag, output)) { - return; - } - } - } - - - // ----------------------------------------------------------------- - - @Override - public double readDouble() throws IOException { - return Double.longBitsToDouble(readRawLittleEndian64()); - } - - @Override - public float readFloat() throws IOException { - return Float.intBitsToFloat(readRawLittleEndian32()); - } - - @Override - public long readUInt64() throws IOException { - return readRawVarint64(); - } - - @Override - public long readInt64() throws IOException { - return readRawVarint64(); - } - - @Override - public int readInt32() throws IOException { - return readRawVarint32(); - } - - @Override - public long readFixed64() throws IOException { - return readRawLittleEndian64(); - } - - @Override - public int readFixed32() throws IOException { - return readRawLittleEndian32(); - } - - @Override - public boolean readBool() throws IOException 
{ - return readRawVarint64() != 0; - } - - @Override - public String readString() throws IOException { - final int size = readRawVarint32(); - if (size > 0 && size <= (limit - pos)) { - // Fast path: We already have the bytes in a contiguous buffer, so - // just copy directly from it. - final String result = new String(buffer, pos, size, UTF_8); - pos += size; - return result; - } - - if (size == 0) { - return ""; - } - if (size < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - - @Override - public String readStringRequireUtf8() throws IOException { - final int size = readRawVarint32(); - if (size > 0 && size <= (limit - pos)) { - // TODO(martinrb): We could save a pass by validating while decoding. - if (!Utf8.isValidUtf8(buffer, pos, pos + size)) { - throw InvalidProtocolBufferException.invalidUtf8(); - } - final int tempPos = pos; - pos += size; - return new String(buffer, tempPos, size, UTF_8); - } - - if (size == 0) { - return ""; - } - if (size <= 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - - @Override - public void readGroup( - final int fieldNumber, - final MessageLite.Builder builder, - final ExtensionRegistryLite extensionRegistry) - throws IOException { - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - ++recursionDepth; - builder.mergeFrom(this, extensionRegistry); - checkLastTagWas(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP)); - --recursionDepth; - } - - - @Override - public T readGroup( - final int fieldNumber, - final Parser parser, - final ExtensionRegistryLite extensionRegistry) - throws IOException { - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - ++recursionDepth; - T result = parser.parsePartialFrom(this, extensionRegistry); - checkLastTagWas(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP)); - --recursionDepth; - return result; - } - - @Deprecated - @Override - public void readUnknownGroup(final int fieldNumber, final MessageLite.Builder builder) - throws IOException { - readGroup(fieldNumber, builder, ExtensionRegistryLite.getEmptyRegistry()); - } - - @Override - public void readMessage( - final MessageLite.Builder builder, final ExtensionRegistryLite extensionRegistry) - throws IOException { - final int length = readRawVarint32(); - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - final int oldLimit = pushLimit(length); - ++recursionDepth; - builder.mergeFrom(this, extensionRegistry); - checkLastTagWas(0); - --recursionDepth; - popLimit(oldLimit); - } - - - @Override - public T readMessage( - final Parser parser, final ExtensionRegistryLite extensionRegistry) throws IOException { - int length = readRawVarint32(); - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - final int oldLimit = pushLimit(length); - ++recursionDepth; - T result = parser.parsePartialFrom(this, extensionRegistry); - checkLastTagWas(0); - --recursionDepth; - popLimit(oldLimit); - return result; - } - - @Override - public ByteString readBytes() throws IOException { - final int size = readRawVarint32(); - if (size > 0 && size <= (limit - pos)) { - // Fast path: We already have the bytes in a contiguous buffer, so - // just copy directly from it. 
- final ByteString result = - immutable && enableAliasing - ? ByteString.wrap(buffer, pos, size) - : ByteString.copyFrom(buffer, pos, size); - pos += size; - return result; - } - if (size == 0) { - return ByteString.EMPTY; - } - // Slow path: Build a byte array first then copy it. - return ByteString.wrap(readRawBytes(size)); - } - - @Override - public byte[] readByteArray() throws IOException { - final int size = readRawVarint32(); - return readRawBytes(size); - } - - @Override - public ByteBuffer readByteBuffer() throws IOException { - final int size = readRawVarint32(); - if (size > 0 && size <= (limit - pos)) { - // Fast path: We already have the bytes in a contiguous buffer. - // When aliasing is enabled, we can return a ByteBuffer pointing directly - // into the underlying byte array without copy if the CodedInputStream is - // constructed from a byte array. If aliasing is disabled or the input is - // from an InputStream or ByteString, we have to make a copy of the bytes. - ByteBuffer result = - !immutable && enableAliasing - ? ByteBuffer.wrap(buffer, pos, size).slice() - : ByteBuffer.wrap(Arrays.copyOfRange(buffer, pos, pos + size)); - pos += size; - // TODO(nathanmittler): Investigate making the ByteBuffer be made read-only - return result; - } - - if (size == 0) { - return EMPTY_BYTE_BUFFER; - } - if (size < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - - @Override - public int readUInt32() throws IOException { - return readRawVarint32(); - } - - @Override - public int readEnum() throws IOException { - return readRawVarint32(); - } - - @Override - public int readSFixed32() throws IOException { - return readRawLittleEndian32(); - } - - @Override - public long readSFixed64() throws IOException { - return readRawLittleEndian64(); - } - - @Override - public int readSInt32() throws IOException { - return decodeZigZag32(readRawVarint32()); - } - - @Override - public long readSInt64() throws IOException { - return decodeZigZag64(readRawVarint64()); - } - - // ================================================================= - - @Override - public int readRawVarint32() throws IOException { - // See implementation notes for readRawVarint64 - fastpath: - { - int tempPos = pos; - - if (limit == tempPos) { - break fastpath; - } - - final byte[] buffer = this.buffer; - int x; - if ((x = buffer[tempPos++]) >= 0) { - pos = tempPos; - return x; - } else if (limit - tempPos < 9) { - break fastpath; - } else if ((x ^= (buffer[tempPos++] << 7)) < 0) { - x ^= (~0 << 7); - } else if ((x ^= (buffer[tempPos++] << 14)) >= 0) { - x ^= (~0 << 7) ^ (~0 << 14); - } else if ((x ^= (buffer[tempPos++] << 21)) < 0) { - x ^= (~0 << 7) ^ (~0 << 14) ^ (~0 << 21); - } else { - int y = buffer[tempPos++]; - x ^= y << 28; - x ^= (~0 << 7) ^ (~0 << 14) ^ (~0 << 21) ^ (~0 << 28); - if (y < 0 - && buffer[tempPos++] < 0 - && buffer[tempPos++] < 0 - && buffer[tempPos++] < 0 - && buffer[tempPos++] < 0 - && buffer[tempPos++] < 0) { - break fastpath; // Will throw malformedVarint() - } - } - pos = tempPos; - return x; - } - return (int) readRawVarint64SlowPath(); - } - - private void skipRawVarint() throws IOException { - if (limit - pos >= MAX_VARINT_SIZE) { - skipRawVarintFastPath(); - } else { - skipRawVarintSlowPath(); - } - } - - private void skipRawVarintFastPath() throws IOException { - for (int i = 0; i < MAX_VARINT_SIZE; i++) { - if (buffer[pos++] >= 0) { - return; - } - } - throw InvalidProtocolBufferException.malformedVarint(); - } - 
- private void skipRawVarintSlowPath() throws IOException { - for (int i = 0; i < MAX_VARINT_SIZE; i++) { - if (readRawByte() >= 0) { - return; - } - } - throw InvalidProtocolBufferException.malformedVarint(); - } - - @Override - public long readRawVarint64() throws IOException { - // Implementation notes: - // - // Optimized for one-byte values, expected to be common. - // The particular code below was selected from various candidates - // empirically, by winning VarintBenchmark. - // - // Sign extension of (signed) Java bytes is usually a nuisance, but - // we exploit it here to more easily obtain the sign of bytes read. - // Instead of cleaning up the sign extension bits by masking eagerly, - // we delay until we find the final (positive) byte, when we clear all - // accumulated bits with one xor. We depend on javac to constant fold. - fastpath: - { - int tempPos = pos; - - if (limit == tempPos) { - break fastpath; - } - - final byte[] buffer = this.buffer; - long x; - int y; - if ((y = buffer[tempPos++]) >= 0) { - pos = tempPos; - return y; - } else if (limit - tempPos < 9) { - break fastpath; - } else if ((y ^= (buffer[tempPos++] << 7)) < 0) { - x = y ^ (~0 << 7); - } else if ((y ^= (buffer[tempPos++] << 14)) >= 0) { - x = y ^ ((~0 << 7) ^ (~0 << 14)); - } else if ((y ^= (buffer[tempPos++] << 21)) < 0) { - x = y ^ ((~0 << 7) ^ (~0 << 14) ^ (~0 << 21)); - } else if ((x = y ^ ((long) buffer[tempPos++] << 28)) >= 0L) { - x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28); - } else if ((x ^= ((long) buffer[tempPos++] << 35)) < 0L) { - x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35); - } else if ((x ^= ((long) buffer[tempPos++] << 42)) >= 0L) { - x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35) ^ (~0L << 42); - } else if ((x ^= ((long) buffer[tempPos++] << 49)) < 0L) { - x ^= - (~0L << 7) - ^ (~0L << 14) - ^ (~0L << 21) - ^ (~0L << 28) - ^ (~0L << 35) - ^ (~0L << 42) - ^ (~0L << 49); - } else { - x ^= ((long) buffer[tempPos++] << 56); - x ^= - (~0L << 7) - ^ (~0L << 14) - ^ (~0L << 21) - ^ (~0L << 28) - ^ (~0L << 35) - ^ (~0L << 42) - ^ (~0L << 49) - ^ (~0L << 56); - if (x < 0L) { - if (buffer[tempPos++] < 0L) { - break fastpath; // Will throw malformedVarint() - } - } - } - pos = tempPos; - return x; - } - return readRawVarint64SlowPath(); - } - - @Override - long readRawVarint64SlowPath() throws IOException { - long result = 0; - for (int shift = 0; shift < 64; shift += 7) { - final byte b = readRawByte(); - result |= (long) (b & 0x7F) << shift; - if ((b & 0x80) == 0) { - return result; - } - } - throw InvalidProtocolBufferException.malformedVarint(); - } - - @Override - public int readRawLittleEndian32() throws IOException { - int tempPos = pos; - - if (limit - tempPos < FIXED_32_SIZE) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - - final byte[] buffer = this.buffer; - pos = tempPos + FIXED_32_SIZE; - return (((buffer[tempPos] & 0xff)) - | ((buffer[tempPos + 1] & 0xff) << 8) - | ((buffer[tempPos + 2] & 0xff) << 16) - | ((buffer[tempPos + 3] & 0xff) << 24)); - } - - @Override - public long readRawLittleEndian64() throws IOException { - int tempPos = pos; - - if (limit - tempPos < FIXED_64_SIZE) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - - final byte[] buffer = this.buffer; - pos = tempPos + FIXED_64_SIZE; - return (((buffer[tempPos] & 0xffL)) - | ((buffer[tempPos + 1] & 0xffL) << 8) - | ((buffer[tempPos + 2] & 0xffL) << 16) - | ((buffer[tempPos + 3] & 0xffL) << 24) - | ((buffer[tempPos + 4] 
& 0xffL) << 32) - | ((buffer[tempPos + 5] & 0xffL) << 40) - | ((buffer[tempPos + 6] & 0xffL) << 48) - | ((buffer[tempPos + 7] & 0xffL) << 56)); - } - - @Override - public void enableAliasing(boolean enabled) { - this.enableAliasing = enabled; - } - - @Override - public void resetSizeCounter() { - startPos = pos; - } - - @Override - public int pushLimit(int byteLimit) throws InvalidProtocolBufferException { - if (byteLimit < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - byteLimit += getTotalBytesRead(); - final int oldLimit = currentLimit; - if (byteLimit > oldLimit) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - currentLimit = byteLimit; - - recomputeBufferSizeAfterLimit(); - - return oldLimit; - } - - private void recomputeBufferSizeAfterLimit() { - limit += bufferSizeAfterLimit; - final int bufferEnd = limit - startPos; - if (bufferEnd > currentLimit) { - // Limit is in current buffer. - bufferSizeAfterLimit = bufferEnd - currentLimit; - limit -= bufferSizeAfterLimit; - } else { - bufferSizeAfterLimit = 0; - } - } - - @Override - public void popLimit(final int oldLimit) { - currentLimit = oldLimit; - recomputeBufferSizeAfterLimit(); - } - - @Override - public int getBytesUntilLimit() { - if (currentLimit == Integer.MAX_VALUE) { - return -1; - } - - return currentLimit - getTotalBytesRead(); - } - - @Override - public boolean isAtEnd() throws IOException { - return pos == limit; - } - - @Override - public int getTotalBytesRead() { - return pos - startPos; - } - - @Override - public byte readRawByte() throws IOException { - if (pos == limit) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - return buffer[pos++]; - } - - @Override - public byte[] readRawBytes(final int length) throws IOException { - if (length > 0 && length <= (limit - pos)) { - final int tempPos = pos; - pos += length; - return Arrays.copyOfRange(buffer, tempPos, pos); - } - - if (length <= 0) { - if (length == 0) { - return Internal.EMPTY_BYTE_ARRAY; - } else { - throw InvalidProtocolBufferException.negativeSize(); - } - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - - @Override - public void skipRawBytes(final int length) throws IOException { - if (length >= 0 && length <= (limit - pos)) { - // We have all the bytes we need already. - pos += length; - return; - } - - if (length < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - } - - /** - * A {@link CodedInputStream} implementation that uses a backing direct ByteBuffer as the input. - * Requires the use of {@code sun.misc.Unsafe} to perform fast reads on the buffer. - */ - private static final class UnsafeDirectNioDecoder extends CodedInputStream { - /** The direct buffer that is backing this stream. */ - private final ByteBuffer buffer; - - /** - * If {@code true}, indicates that the buffer is backing a {@link ByteString} and is therefore - * considered to be an immutable input source. - */ - private final boolean immutable; - - /** The unsafe address of the content of {@link #buffer}. */ - private final long address; - - /** The unsafe address of the current read limit of the buffer. */ - private long limit; - - /** The unsafe address of the current read position of the buffer. */ - private long pos; - - /** The unsafe address of the starting read position. */ - private long startPos; - - /** The amount of available data in the buffer beyond {@link #limit}. 
*/ - private int bufferSizeAfterLimit; - - /** The last tag that was read from this stream. */ - private int lastTag; - - /** - * If {@code true}, indicates that calls to read {@link ByteString} or {@code byte[]} - * may return slices of the underlying buffer, rather than copies. - */ - private boolean enableAliasing; - - /** The absolute position of the end of the current message. */ - private int currentLimit = Integer.MAX_VALUE; - - static boolean isSupported() { - return UnsafeUtil.hasUnsafeByteBufferOperations(); - } - - private UnsafeDirectNioDecoder(ByteBuffer buffer, boolean immutable) { - this.buffer = buffer; - address = UnsafeUtil.addressOffset(buffer); - limit = address + buffer.limit(); - pos = address + buffer.position(); - startPos = pos; - this.immutable = immutable; - } - - @Override - public int readTag() throws IOException { - if (isAtEnd()) { - lastTag = 0; - return 0; - } - - lastTag = readRawVarint32(); - if (WireFormat.getTagFieldNumber(lastTag) == 0) { - // If we actually read zero (or any tag number corresponding to field - // number zero), that's not a valid tag. - throw InvalidProtocolBufferException.invalidTag(); - } - return lastTag; - } - - @Override - public void checkLastTagWas(final int value) throws InvalidProtocolBufferException { - if (lastTag != value) { - throw InvalidProtocolBufferException.invalidEndTag(); - } - } - - @Override - public int getLastTag() { - return lastTag; - } - - @Override - public boolean skipField(final int tag) throws IOException { - switch (WireFormat.getTagWireType(tag)) { - case WireFormat.WIRETYPE_VARINT: - skipRawVarint(); - return true; - case WireFormat.WIRETYPE_FIXED64: - skipRawBytes(FIXED_64_SIZE); - return true; - case WireFormat.WIRETYPE_LENGTH_DELIMITED: - skipRawBytes(readRawVarint32()); - return true; - case WireFormat.WIRETYPE_START_GROUP: - skipMessage(); - checkLastTagWas( - WireFormat.makeTag(WireFormat.getTagFieldNumber(tag), WireFormat.WIRETYPE_END_GROUP)); - return true; - case WireFormat.WIRETYPE_END_GROUP: - return false; - case WireFormat.WIRETYPE_FIXED32: - skipRawBytes(FIXED_32_SIZE); - return true; - default: - throw InvalidProtocolBufferException.invalidWireType(); - } - } - - @Override - public boolean skipField(final int tag, final CodedOutputStream output) throws IOException { - switch (WireFormat.getTagWireType(tag)) { - case WireFormat.WIRETYPE_VARINT: - { - long value = readInt64(); - output.writeRawVarint32(tag); - output.writeUInt64NoTag(value); - return true; - } - case WireFormat.WIRETYPE_FIXED64: - { - long value = readRawLittleEndian64(); - output.writeRawVarint32(tag); - output.writeFixed64NoTag(value); - return true; - } - case WireFormat.WIRETYPE_LENGTH_DELIMITED: - { - ByteString value = readBytes(); - output.writeRawVarint32(tag); - output.writeBytesNoTag(value); - return true; - } - case WireFormat.WIRETYPE_START_GROUP: - { - output.writeRawVarint32(tag); - skipMessage(output); - int endtag = - WireFormat.makeTag( - WireFormat.getTagFieldNumber(tag), WireFormat.WIRETYPE_END_GROUP); - checkLastTagWas(endtag); - output.writeRawVarint32(endtag); - return true; - } - case WireFormat.WIRETYPE_END_GROUP: - { - return false; - } - case WireFormat.WIRETYPE_FIXED32: - { - int value = readRawLittleEndian32(); - output.writeRawVarint32(tag); - output.writeFixed32NoTag(value); - return true; - } - default: - throw InvalidProtocolBufferException.invalidWireType(); - } - } - - @Override - public void skipMessage() throws IOException { - while (true) { - final int tag = readTag(); - if (tag == 0 || 
!skipField(tag)) { - return; - } - } - } - - @Override - public void skipMessage(CodedOutputStream output) throws IOException { - while (true) { - final int tag = readTag(); - if (tag == 0 || !skipField(tag, output)) { - return; - } - } - } - - - // ----------------------------------------------------------------- - - @Override - public double readDouble() throws IOException { - return Double.longBitsToDouble(readRawLittleEndian64()); - } - - @Override - public float readFloat() throws IOException { - return Float.intBitsToFloat(readRawLittleEndian32()); - } - - @Override - public long readUInt64() throws IOException { - return readRawVarint64(); - } - - @Override - public long readInt64() throws IOException { - return readRawVarint64(); - } - - @Override - public int readInt32() throws IOException { - return readRawVarint32(); - } - - @Override - public long readFixed64() throws IOException { - return readRawLittleEndian64(); - } - - @Override - public int readFixed32() throws IOException { - return readRawLittleEndian32(); - } - - @Override - public boolean readBool() throws IOException { - return readRawVarint64() != 0; - } - - @Override - public String readString() throws IOException { - final int size = readRawVarint32(); - if (size > 0 && size <= remaining()) { - // TODO(nathanmittler): Is there a way to avoid this copy? - byte[] bytes = copyToArray(pos, pos + size); - String result = new String(bytes, UTF_8); - pos += size; - return result; - } - - if (size == 0) { - return ""; - } - if (size < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - - @Override - public String readStringRequireUtf8() throws IOException { - final int size = readRawVarint32(); - if (size >= 0 && size <= remaining()) { - // TODO(nathanmittler): Is there a way to avoid this copy? - byte[] bytes = copyToArray(pos, pos + size); - // TODO(martinrb): We could save a pass by validating while decoding. 
- if (!Utf8.isValidUtf8(bytes)) { - throw InvalidProtocolBufferException.invalidUtf8(); - } - - String result = new String(bytes, UTF_8); - pos += size; - return result; - } - - if (size == 0) { - return ""; - } - if (size <= 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - - @Override - public void readGroup( - final int fieldNumber, - final MessageLite.Builder builder, - final ExtensionRegistryLite extensionRegistry) - throws IOException { - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - ++recursionDepth; - builder.mergeFrom(this, extensionRegistry); - checkLastTagWas(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP)); - --recursionDepth; - } - - - @Override - public T readGroup( - final int fieldNumber, - final Parser parser, - final ExtensionRegistryLite extensionRegistry) - throws IOException { - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - ++recursionDepth; - T result = parser.parsePartialFrom(this, extensionRegistry); - checkLastTagWas(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP)); - --recursionDepth; - return result; - } - - @Deprecated - @Override - public void readUnknownGroup(final int fieldNumber, final MessageLite.Builder builder) - throws IOException { - readGroup(fieldNumber, builder, ExtensionRegistryLite.getEmptyRegistry()); - } - - @Override - public void readMessage( - final MessageLite.Builder builder, final ExtensionRegistryLite extensionRegistry) - throws IOException { - final int length = readRawVarint32(); - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - final int oldLimit = pushLimit(length); - ++recursionDepth; - builder.mergeFrom(this, extensionRegistry); - checkLastTagWas(0); - --recursionDepth; - popLimit(oldLimit); - } - - - @Override - public T readMessage( - final Parser parser, final ExtensionRegistryLite extensionRegistry) throws IOException { - int length = readRawVarint32(); - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - final int oldLimit = pushLimit(length); - ++recursionDepth; - T result = parser.parsePartialFrom(this, extensionRegistry); - checkLastTagWas(0); - --recursionDepth; - popLimit(oldLimit); - return result; - } - - @Override - public ByteString readBytes() throws IOException { - final int size = readRawVarint32(); - if (size > 0 && size <= remaining()) { - ByteBuffer result; - if (immutable && enableAliasing) { - result = slice(pos, pos + size); - } else { - result = copy(pos, pos + size); - } - pos += size; - return ByteString.wrap(result); - } - - if (size == 0) { - return ByteString.EMPTY; - } - if (size < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - - @Override - public byte[] readByteArray() throws IOException { - return readRawBytes(readRawVarint32()); - } - - @Override - public ByteBuffer readByteBuffer() throws IOException { - final int size = readRawVarint32(); - if (size > 0 && size <= remaining()) { - ByteBuffer result; - // "Immutable" implies that buffer is backing a ByteString. - // Disallow slicing in this case to prevent the caller from modifying the contents - // of the ByteString. 
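(Editor's aside, not part of the deleted file: the aliasing/immutability check described in the comment above is what lets a caller opt in to zero-copy reads. A hypothetical usage sketch, assuming the newInstance(ByteBuffer) factory defined earlier in this class and an input whose next field is length-delimited, is shown below.)

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream;

    final class AliasingSketch {
      // Read a length-delimited payload without copying it out of a direct buffer.
      static ByteBuffer readPayload(ByteBuffer direct) throws IOException {
        CodedInputStream in = CodedInputStream.newInstance(direct);
        in.enableAliasing(true);      // allow slices instead of copies where the decoder permits it
        in.readTag();                 // assumed input: the next field is length-delimited
        return in.readByteBuffer();   // may be a slice of 'direct' rather than a copy
      }
    }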
- if (!immutable && enableAliasing) { - result = slice(pos, pos + size); - } else { - result = copy(pos, pos + size); - } - pos += size; - // TODO(nathanmittler): Investigate making the ByteBuffer be made read-only - return result; - } - - if (size == 0) { - return EMPTY_BYTE_BUFFER; - } - if (size < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - - @Override - public int readUInt32() throws IOException { - return readRawVarint32(); - } - - @Override - public int readEnum() throws IOException { - return readRawVarint32(); - } - - @Override - public int readSFixed32() throws IOException { - return readRawLittleEndian32(); - } - - @Override - public long readSFixed64() throws IOException { - return readRawLittleEndian64(); - } - - @Override - public int readSInt32() throws IOException { - return decodeZigZag32(readRawVarint32()); - } - - @Override - public long readSInt64() throws IOException { - return decodeZigZag64(readRawVarint64()); - } - - // ================================================================= - - @Override - public int readRawVarint32() throws IOException { - // See implementation notes for readRawVarint64 - fastpath: - { - long tempPos = pos; - - if (limit == tempPos) { - break fastpath; - } - - int x; - if ((x = UnsafeUtil.getByte(tempPos++)) >= 0) { - pos = tempPos; - return x; - } else if (limit - tempPos < 9) { - break fastpath; - } else if ((x ^= (UnsafeUtil.getByte(tempPos++) << 7)) < 0) { - x ^= (~0 << 7); - } else if ((x ^= (UnsafeUtil.getByte(tempPos++) << 14)) >= 0) { - x ^= (~0 << 7) ^ (~0 << 14); - } else if ((x ^= (UnsafeUtil.getByte(tempPos++) << 21)) < 0) { - x ^= (~0 << 7) ^ (~0 << 14) ^ (~0 << 21); - } else { - int y = UnsafeUtil.getByte(tempPos++); - x ^= y << 28; - x ^= (~0 << 7) ^ (~0 << 14) ^ (~0 << 21) ^ (~0 << 28); - if (y < 0 - && UnsafeUtil.getByte(tempPos++) < 0 - && UnsafeUtil.getByte(tempPos++) < 0 - && UnsafeUtil.getByte(tempPos++) < 0 - && UnsafeUtil.getByte(tempPos++) < 0 - && UnsafeUtil.getByte(tempPos++) < 0) { - break fastpath; // Will throw malformedVarint() - } - } - pos = tempPos; - return x; - } - return (int) readRawVarint64SlowPath(); - } - - private void skipRawVarint() throws IOException { - if (remaining() >= MAX_VARINT_SIZE) { - skipRawVarintFastPath(); - } else { - skipRawVarintSlowPath(); - } - } - - private void skipRawVarintFastPath() throws IOException { - for (int i = 0; i < MAX_VARINT_SIZE; i++) { - if (UnsafeUtil.getByte(pos++) >= 0) { - return; - } - } - throw InvalidProtocolBufferException.malformedVarint(); - } - - private void skipRawVarintSlowPath() throws IOException { - for (int i = 0; i < MAX_VARINT_SIZE; i++) { - if (readRawByte() >= 0) { - return; - } - } - throw InvalidProtocolBufferException.malformedVarint(); - } - - @Override - public long readRawVarint64() throws IOException { - // Implementation notes: - // - // Optimized for one-byte values, expected to be common. - // The particular code below was selected from various candidates - // empirically, by winning VarintBenchmark. - // - // Sign extension of (signed) Java bytes is usually a nuisance, but - // we exploit it here to more easily obtain the sign of bytes read. - // Instead of cleaning up the sign extension bits by masking eagerly, - // we delay until we find the final (positive) byte, when we clear all - // accumulated bits with one xor. We depend on javac to constant fold. 
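(Editor's aside, not part of the deleted file: the implementation note above refers to the standard base-128 varint encoding. A minimal, hypothetical sketch of that encoding, together with the plain decode loop that the slow path uses, is shown below; the class and method names are illustrative only.)

    import java.io.ByteArrayOutputStream;

    final class VarintSketch {
      // Encode: emit the low 7 bits first; set the high bit on every byte
      // except the last to signal that more bytes follow.
      static byte[] encode(long value) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        while ((value & ~0x7FL) != 0) {
          out.write((int) ((value & 0x7FL) | 0x80));
          value >>>= 7;
        }
        out.write((int) value);
        return out.toByteArray();
      }

      // Decode: accumulate 7-bit groups until a byte with a clear high bit,
      // which is also what readRawVarint64SlowPath() does.
      static long decode(byte[] bytes) {
        long result = 0;
        for (int i = 0, shift = 0; i < bytes.length && shift < 64; i++, shift += 7) {
          result |= (long) (bytes[i] & 0x7F) << shift;
          if ((bytes[i] & 0x80) == 0) {
            break;
          }
        }
        return result;
      }

      public static void main(String[] args) {
        byte[] wire = encode(300);         // two bytes: 0xAC 0x02
        System.out.println(decode(wire));  // prints 300
      }
    }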
- fastpath: - { - long tempPos = pos; - - if (limit == tempPos) { - break fastpath; - } - - long x; - int y; - if ((y = UnsafeUtil.getByte(tempPos++)) >= 0) { - pos = tempPos; - return y; - } else if (limit - tempPos < 9) { - break fastpath; - } else if ((y ^= (UnsafeUtil.getByte(tempPos++) << 7)) < 0) { - x = y ^ (~0 << 7); - } else if ((y ^= (UnsafeUtil.getByte(tempPos++) << 14)) >= 0) { - x = y ^ ((~0 << 7) ^ (~0 << 14)); - } else if ((y ^= (UnsafeUtil.getByte(tempPos++) << 21)) < 0) { - x = y ^ ((~0 << 7) ^ (~0 << 14) ^ (~0 << 21)); - } else if ((x = y ^ ((long) UnsafeUtil.getByte(tempPos++) << 28)) >= 0L) { - x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28); - } else if ((x ^= ((long) UnsafeUtil.getByte(tempPos++) << 35)) < 0L) { - x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35); - } else if ((x ^= ((long) UnsafeUtil.getByte(tempPos++) << 42)) >= 0L) { - x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35) ^ (~0L << 42); - } else if ((x ^= ((long) UnsafeUtil.getByte(tempPos++) << 49)) < 0L) { - x ^= - (~0L << 7) - ^ (~0L << 14) - ^ (~0L << 21) - ^ (~0L << 28) - ^ (~0L << 35) - ^ (~0L << 42) - ^ (~0L << 49); - } else { - x ^= ((long) UnsafeUtil.getByte(tempPos++) << 56); - x ^= - (~0L << 7) - ^ (~0L << 14) - ^ (~0L << 21) - ^ (~0L << 28) - ^ (~0L << 35) - ^ (~0L << 42) - ^ (~0L << 49) - ^ (~0L << 56); - if (x < 0L) { - if (UnsafeUtil.getByte(tempPos++) < 0L) { - break fastpath; // Will throw malformedVarint() - } - } - } - pos = tempPos; - return x; - } - return readRawVarint64SlowPath(); - } - - @Override - long readRawVarint64SlowPath() throws IOException { - long result = 0; - for (int shift = 0; shift < 64; shift += 7) { - final byte b = readRawByte(); - result |= (long) (b & 0x7F) << shift; - if ((b & 0x80) == 0) { - return result; - } - } - throw InvalidProtocolBufferException.malformedVarint(); - } - - @Override - public int readRawLittleEndian32() throws IOException { - long tempPos = pos; - - if (limit - tempPos < FIXED_32_SIZE) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - - pos = tempPos + FIXED_32_SIZE; - return (((UnsafeUtil.getByte(tempPos) & 0xff)) - | ((UnsafeUtil.getByte(tempPos + 1) & 0xff) << 8) - | ((UnsafeUtil.getByte(tempPos + 2) & 0xff) << 16) - | ((UnsafeUtil.getByte(tempPos + 3) & 0xff) << 24)); - } - - @Override - public long readRawLittleEndian64() throws IOException { - long tempPos = pos; - - if (limit - tempPos < FIXED_64_SIZE) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - - pos = tempPos + FIXED_64_SIZE; - return (((UnsafeUtil.getByte(tempPos) & 0xffL)) - | ((UnsafeUtil.getByte(tempPos + 1) & 0xffL) << 8) - | ((UnsafeUtil.getByte(tempPos + 2) & 0xffL) << 16) - | ((UnsafeUtil.getByte(tempPos + 3) & 0xffL) << 24) - | ((UnsafeUtil.getByte(tempPos + 4) & 0xffL) << 32) - | ((UnsafeUtil.getByte(tempPos + 5) & 0xffL) << 40) - | ((UnsafeUtil.getByte(tempPos + 6) & 0xffL) << 48) - | ((UnsafeUtil.getByte(tempPos + 7) & 0xffL) << 56)); - } - - @Override - public void enableAliasing(boolean enabled) { - this.enableAliasing = enabled; - } - - @Override - public void resetSizeCounter() { - startPos = pos; - } - - @Override - public int pushLimit(int byteLimit) throws InvalidProtocolBufferException { - if (byteLimit < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - byteLimit += getTotalBytesRead(); - final int oldLimit = currentLimit; - if (byteLimit > oldLimit) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - currentLimit = byteLimit; - - 
recomputeBufferSizeAfterLimit(); - - return oldLimit; - } - - @Override - public void popLimit(final int oldLimit) { - currentLimit = oldLimit; - recomputeBufferSizeAfterLimit(); - } - - @Override - public int getBytesUntilLimit() { - if (currentLimit == Integer.MAX_VALUE) { - return -1; - } - - return currentLimit - getTotalBytesRead(); - } - - @Override - public boolean isAtEnd() throws IOException { - return pos == limit; - } - - @Override - public int getTotalBytesRead() { - return (int) (pos - startPos); - } - - @Override - public byte readRawByte() throws IOException { - if (pos == limit) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - return UnsafeUtil.getByte(pos++); - } - - @Override - public byte[] readRawBytes(final int length) throws IOException { - if (length >= 0 && length <= remaining()) { - byte[] bytes = new byte[length]; - slice(pos, pos + length).get(bytes); - pos += length; - return bytes; - } - - if (length <= 0) { - if (length == 0) { - return EMPTY_BYTE_ARRAY; - } else { - throw InvalidProtocolBufferException.negativeSize(); - } - } - - throw InvalidProtocolBufferException.truncatedMessage(); - } - - @Override - public void skipRawBytes(final int length) throws IOException { - if (length >= 0 && length <= remaining()) { - // We have all the bytes we need already. - pos += length; - return; - } - - if (length < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - - private void recomputeBufferSizeAfterLimit() { - limit += bufferSizeAfterLimit; - final int bufferEnd = (int) (limit - startPos); - if (bufferEnd > currentLimit) { - // Limit is in current buffer. - bufferSizeAfterLimit = bufferEnd - currentLimit; - limit -= bufferSizeAfterLimit; - } else { - bufferSizeAfterLimit = 0; - } - } - - private int remaining() { - return (int) (limit - pos); - } - - private int bufferPos(long pos) { - return (int) (pos - address); - } - - private ByteBuffer slice(long begin, long end) throws IOException { - int prevPos = buffer.position(); - int prevLimit = buffer.limit(); - try { - buffer.position(bufferPos(begin)); - buffer.limit(bufferPos(end)); - return buffer.slice(); - } catch (IllegalArgumentException e) { - throw InvalidProtocolBufferException.truncatedMessage(); - } finally { - buffer.position(prevPos); - buffer.limit(prevLimit); - } - } - - private ByteBuffer copy(long begin, long end) throws IOException { - return ByteBuffer.wrap(copyToArray(begin, end)); - } - - private byte[] copyToArray(long begin, long end) throws IOException { - int prevPos = buffer.position(); - int prevLimit = buffer.limit(); - try { - buffer.position(bufferPos(begin)); - buffer.limit(bufferPos(end)); - byte[] bytes = new byte[(int) (end - begin)]; - buffer.get(bytes); - return bytes; - } catch (IllegalArgumentException e) { - throw InvalidProtocolBufferException.truncatedMessage(); - } finally { - buffer.position(prevPos); - buffer.limit(prevLimit); - } - } - } - - /** - * Implementation of {@link CodedInputStream} that uses an {@link InputStream} as the data source. - */ - private static final class StreamDecoder extends CodedInputStream { - private final InputStream input; - private final byte[] buffer; - /** bufferSize represents how many bytes are currently filled in the buffer */ - private int bufferSize; - - private int bufferSizeAfterLimit; - private int pos; - private int lastTag; - - /** - * The total number of bytes read before the current buffer. 
The total bytes read up to the - * current position can be computed as {@code totalBytesRetired + pos}. This value may be - * negative if reading started in the middle of the current buffer (e.g. if the constructor that - * takes a byte array and an offset was used). - */ - private int totalBytesRetired; - - /** The absolute position of the end of the current message. */ - private int currentLimit = Integer.MAX_VALUE; - - private StreamDecoder(final InputStream input, int bufferSize) { - checkNotNull(input, "input"); - this.input = input; - this.buffer = new byte[bufferSize]; - this.bufferSize = 0; - pos = 0; - totalBytesRetired = 0; - } - - @Override - public int readTag() throws IOException { - if (isAtEnd()) { - lastTag = 0; - return 0; - } - - lastTag = readRawVarint32(); - if (WireFormat.getTagFieldNumber(lastTag) == 0) { - // If we actually read zero (or any tag number corresponding to field - // number zero), that's not a valid tag. - throw InvalidProtocolBufferException.invalidTag(); - } - return lastTag; - } - - @Override - public void checkLastTagWas(final int value) throws InvalidProtocolBufferException { - if (lastTag != value) { - throw InvalidProtocolBufferException.invalidEndTag(); - } - } - - @Override - public int getLastTag() { - return lastTag; - } - - @Override - public boolean skipField(final int tag) throws IOException { - switch (WireFormat.getTagWireType(tag)) { - case WireFormat.WIRETYPE_VARINT: - skipRawVarint(); - return true; - case WireFormat.WIRETYPE_FIXED64: - skipRawBytes(FIXED_64_SIZE); - return true; - case WireFormat.WIRETYPE_LENGTH_DELIMITED: - skipRawBytes(readRawVarint32()); - return true; - case WireFormat.WIRETYPE_START_GROUP: - skipMessage(); - checkLastTagWas( - WireFormat.makeTag(WireFormat.getTagFieldNumber(tag), WireFormat.WIRETYPE_END_GROUP)); - return true; - case WireFormat.WIRETYPE_END_GROUP: - return false; - case WireFormat.WIRETYPE_FIXED32: - skipRawBytes(FIXED_32_SIZE); - return true; - default: - throw InvalidProtocolBufferException.invalidWireType(); - } - } - - @Override - public boolean skipField(final int tag, final CodedOutputStream output) throws IOException { - switch (WireFormat.getTagWireType(tag)) { - case WireFormat.WIRETYPE_VARINT: - { - long value = readInt64(); - output.writeRawVarint32(tag); - output.writeUInt64NoTag(value); - return true; - } - case WireFormat.WIRETYPE_FIXED64: - { - long value = readRawLittleEndian64(); - output.writeRawVarint32(tag); - output.writeFixed64NoTag(value); - return true; - } - case WireFormat.WIRETYPE_LENGTH_DELIMITED: - { - ByteString value = readBytes(); - output.writeRawVarint32(tag); - output.writeBytesNoTag(value); - return true; - } - case WireFormat.WIRETYPE_START_GROUP: - { - output.writeRawVarint32(tag); - skipMessage(output); - int endtag = - WireFormat.makeTag( - WireFormat.getTagFieldNumber(tag), WireFormat.WIRETYPE_END_GROUP); - checkLastTagWas(endtag); - output.writeRawVarint32(endtag); - return true; - } - case WireFormat.WIRETYPE_END_GROUP: - { - return false; - } - case WireFormat.WIRETYPE_FIXED32: - { - int value = readRawLittleEndian32(); - output.writeRawVarint32(tag); - output.writeFixed32NoTag(value); - return true; - } - default: - throw InvalidProtocolBufferException.invalidWireType(); - } - } - - @Override - public void skipMessage() throws IOException { - while (true) { - final int tag = readTag(); - if (tag == 0 || !skipField(tag)) { - return; - } - } - } - - @Override - public void skipMessage(CodedOutputStream output) throws IOException { - while (true) { - 
final int tag = readTag(); - if (tag == 0 || !skipField(tag, output)) { - return; - } - } - } - - /** Collects the bytes skipped and returns the data in a ByteBuffer. */ - private class SkippedDataSink implements RefillCallback { - private int lastPos = pos; - private ByteArrayOutputStream byteArrayStream; - - @Override - public void onRefill() { - if (byteArrayStream == null) { - byteArrayStream = new ByteArrayOutputStream(); - } - byteArrayStream.write(buffer, lastPos, pos - lastPos); - lastPos = 0; - } - - /** Gets skipped data in a ByteBuffer. This method should only be called once. */ - ByteBuffer getSkippedData() { - if (byteArrayStream == null) { - return ByteBuffer.wrap(buffer, lastPos, pos - lastPos); - } else { - byteArrayStream.write(buffer, lastPos, pos); - return ByteBuffer.wrap(byteArrayStream.toByteArray()); - } - } - } - - - // ----------------------------------------------------------------- - - @Override - public double readDouble() throws IOException { - return Double.longBitsToDouble(readRawLittleEndian64()); - } - - @Override - public float readFloat() throws IOException { - return Float.intBitsToFloat(readRawLittleEndian32()); - } - - @Override - public long readUInt64() throws IOException { - return readRawVarint64(); - } - - @Override - public long readInt64() throws IOException { - return readRawVarint64(); - } - - @Override - public int readInt32() throws IOException { - return readRawVarint32(); - } - - @Override - public long readFixed64() throws IOException { - return readRawLittleEndian64(); - } - - @Override - public int readFixed32() throws IOException { - return readRawLittleEndian32(); - } - - @Override - public boolean readBool() throws IOException { - return readRawVarint64() != 0; - } - - @Override - public String readString() throws IOException { - final int size = readRawVarint32(); - if (size > 0 && size <= (bufferSize - pos)) { - // Fast path: We already have the bytes in a contiguous buffer, so - // just copy directly from it. - final String result = new String(buffer, pos, size, UTF_8); - pos += size; - return result; - } - if (size == 0) { - return ""; - } - if (size <= bufferSize) { - refillBuffer(size); - String result = new String(buffer, pos, size, UTF_8); - pos += size; - return result; - } - // Slow path: Build a byte array first then copy it. - return new String(readRawBytesSlowPath(size), UTF_8); - } - - @Override - public String readStringRequireUtf8() throws IOException { - final int size = readRawVarint32(); - final byte[] bytes; - final int oldPos = pos; - final int tempPos; - if (size <= (bufferSize - oldPos) && size > 0) { - // Fast path: We already have the bytes in a contiguous buffer, so - // just copy directly from it. - bytes = buffer; - pos = oldPos + size; - tempPos = oldPos; - } else if (size == 0) { - return ""; - } else if (size <= bufferSize) { - refillBuffer(size); - bytes = buffer; - tempPos = 0; - pos = tempPos + size; - } else { - // Slow path: Build a byte array first then copy it. - bytes = readRawBytesSlowPath(size); - tempPos = 0; - } - // TODO(martinrb): We could save a pass by validating while decoding. 
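(Editor's aside, not part of the deleted file: readString() and readStringRequireUtf8() consume a length-delimited field, i.e. a length varint followed by that many UTF-8 bytes. A small, hypothetical sketch of that wire layout, using only JDK calls and an assumed field number of 1, is shown below.)

    import java.nio.charset.StandardCharsets;

    final class StringFieldWireSketch {
      public static void main(String[] args) {
        // Hypothetical schema: string name = 1;
        // Layout on the wire: tag, length varint, UTF-8 payload.
        byte[] utf8 = "hi".getBytes(StandardCharsets.UTF_8);
        byte[] wire = new byte[2 + utf8.length];
        wire[0] = (1 << 3) | 2;          // tag: field number 1, wire type 2 (length-delimited) = 0x0A
        wire[1] = (byte) utf8.length;    // length varint; fits in a single byte here
        System.arraycopy(utf8, 0, wire, 2, utf8.length);
        // A string reader consumes the length varint and then decodes exactly
        // that many bytes as UTF-8, optionally validating them first.
      }
    }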
- if (!Utf8.isValidUtf8(bytes, tempPos, tempPos + size)) { - throw InvalidProtocolBufferException.invalidUtf8(); - } - return new String(bytes, tempPos, size, UTF_8); - } - - @Override - public void readGroup( - final int fieldNumber, - final MessageLite.Builder builder, - final ExtensionRegistryLite extensionRegistry) - throws IOException { - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - ++recursionDepth; - builder.mergeFrom(this, extensionRegistry); - checkLastTagWas(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP)); - --recursionDepth; - } - - - @Override - public T readGroup( - final int fieldNumber, - final Parser parser, - final ExtensionRegistryLite extensionRegistry) - throws IOException { - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - ++recursionDepth; - T result = parser.parsePartialFrom(this, extensionRegistry); - checkLastTagWas(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP)); - --recursionDepth; - return result; - } - - @Deprecated - @Override - public void readUnknownGroup(final int fieldNumber, final MessageLite.Builder builder) - throws IOException { - readGroup(fieldNumber, builder, ExtensionRegistryLite.getEmptyRegistry()); - } - - @Override - public void readMessage( - final MessageLite.Builder builder, final ExtensionRegistryLite extensionRegistry) - throws IOException { - final int length = readRawVarint32(); - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - final int oldLimit = pushLimit(length); - ++recursionDepth; - builder.mergeFrom(this, extensionRegistry); - checkLastTagWas(0); - --recursionDepth; - popLimit(oldLimit); - } - - - @Override - public T readMessage( - final Parser parser, final ExtensionRegistryLite extensionRegistry) throws IOException { - int length = readRawVarint32(); - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - final int oldLimit = pushLimit(length); - ++recursionDepth; - T result = parser.parsePartialFrom(this, extensionRegistry); - checkLastTagWas(0); - --recursionDepth; - popLimit(oldLimit); - return result; - } - - @Override - public ByteString readBytes() throws IOException { - final int size = readRawVarint32(); - if (size <= (bufferSize - pos) && size > 0) { - // Fast path: We already have the bytes in a contiguous buffer, so - // just copy directly from it. - final ByteString result = ByteString.copyFrom(buffer, pos, size); - pos += size; - return result; - } - if (size == 0) { - return ByteString.EMPTY; - } - // Slow path: Build a byte array first then copy it. - return ByteString.wrap(readRawBytesSlowPath(size)); - } - - @Override - public byte[] readByteArray() throws IOException { - final int size = readRawVarint32(); - if (size <= (bufferSize - pos) && size > 0) { - // Fast path: We already have the bytes in a contiguous buffer, so - // just copy directly from it. - final byte[] result = Arrays.copyOfRange(buffer, pos, pos + size); - pos += size; - return result; - } else { - // Slow path: Build a byte array first then copy it. - return readRawBytesSlowPath(size); - } - } - - @Override - public ByteBuffer readByteBuffer() throws IOException { - final int size = readRawVarint32(); - if (size <= (bufferSize - pos) && size > 0) { - // Fast path: We already have the bytes in a contiguous buffer. 
- ByteBuffer result = ByteBuffer.wrap(Arrays.copyOfRange(buffer, pos, pos + size)); - pos += size; - return result; - } - if (size == 0) { - return Internal.EMPTY_BYTE_BUFFER; - } - // Slow path: Build a byte array first then copy it. - return ByteBuffer.wrap(readRawBytesSlowPath(size)); - } - - @Override - public int readUInt32() throws IOException { - return readRawVarint32(); - } - - @Override - public int readEnum() throws IOException { - return readRawVarint32(); - } - - @Override - public int readSFixed32() throws IOException { - return readRawLittleEndian32(); - } - - @Override - public long readSFixed64() throws IOException { - return readRawLittleEndian64(); - } - - @Override - public int readSInt32() throws IOException { - return decodeZigZag32(readRawVarint32()); - } - - @Override - public long readSInt64() throws IOException { - return decodeZigZag64(readRawVarint64()); - } - - // ================================================================= - - @Override - public int readRawVarint32() throws IOException { - // See implementation notes for readRawVarint64 - fastpath: - { - int tempPos = pos; - - if (bufferSize == tempPos) { - break fastpath; - } - - final byte[] buffer = this.buffer; - int x; - if ((x = buffer[tempPos++]) >= 0) { - pos = tempPos; - return x; - } else if (bufferSize - tempPos < 9) { - break fastpath; - } else if ((x ^= (buffer[tempPos++] << 7)) < 0) { - x ^= (~0 << 7); - } else if ((x ^= (buffer[tempPos++] << 14)) >= 0) { - x ^= (~0 << 7) ^ (~0 << 14); - } else if ((x ^= (buffer[tempPos++] << 21)) < 0) { - x ^= (~0 << 7) ^ (~0 << 14) ^ (~0 << 21); - } else { - int y = buffer[tempPos++]; - x ^= y << 28; - x ^= (~0 << 7) ^ (~0 << 14) ^ (~0 << 21) ^ (~0 << 28); - if (y < 0 - && buffer[tempPos++] < 0 - && buffer[tempPos++] < 0 - && buffer[tempPos++] < 0 - && buffer[tempPos++] < 0 - && buffer[tempPos++] < 0) { - break fastpath; // Will throw malformedVarint() - } - } - pos = tempPos; - return x; - } - return (int) readRawVarint64SlowPath(); - } - - private void skipRawVarint() throws IOException { - if (bufferSize - pos >= MAX_VARINT_SIZE) { - skipRawVarintFastPath(); - } else { - skipRawVarintSlowPath(); - } - } - - private void skipRawVarintFastPath() throws IOException { - for (int i = 0; i < MAX_VARINT_SIZE; i++) { - if (buffer[pos++] >= 0) { - return; - } - } - throw InvalidProtocolBufferException.malformedVarint(); - } - - private void skipRawVarintSlowPath() throws IOException { - for (int i = 0; i < MAX_VARINT_SIZE; i++) { - if (readRawByte() >= 0) { - return; - } - } - throw InvalidProtocolBufferException.malformedVarint(); - } - - @Override - public long readRawVarint64() throws IOException { - // Implementation notes: - // - // Optimized for one-byte values, expected to be common. - // The particular code below was selected from various candidates - // empirically, by winning VarintBenchmark. - // - // Sign extension of (signed) Java bytes is usually a nuisance, but - // we exploit it here to more easily obtain the sign of bytes read. - // Instead of cleaning up the sign extension bits by masking eagerly, - // we delay until we find the final (positive) byte, when we clear all - // accumulated bits with one xor. We depend on javac to constant fold. 
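(Editor's aside, not part of the deleted file: readSInt32()/readSInt64() above run the raw varint through ZigZag decoding so that small negative values also encode to short varints. A minimal sketch of that mapping, under the assumption that it matches the decodeZigZag32() helper used here, is shown below; the class name is illustrative only.)

    final class ZigZagSketch {
      // ZigZag interleaves signed values onto the unsigned range:
      // 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4, ...
      static int encodeZigZag32(int n) {
        return (n << 1) ^ (n >> 31);
      }

      static int decodeZigZag32(int n) {
        return (n >>> 1) ^ -(n & 1);
      }

      public static void main(String[] args) {
        for (int v : new int[] {0, -1, 1, -2, 2, -64}) {
          int encoded = encodeZigZag32(v);
          System.out.println(v + " -> " + encoded + " -> " + decodeZigZag32(encoded));
        }
      }
    }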
- fastpath: - { - int tempPos = pos; - - if (bufferSize == tempPos) { - break fastpath; - } - - final byte[] buffer = this.buffer; - long x; - int y; - if ((y = buffer[tempPos++]) >= 0) { - pos = tempPos; - return y; - } else if (bufferSize - tempPos < 9) { - break fastpath; - } else if ((y ^= (buffer[tempPos++] << 7)) < 0) { - x = y ^ (~0 << 7); - } else if ((y ^= (buffer[tempPos++] << 14)) >= 0) { - x = y ^ ((~0 << 7) ^ (~0 << 14)); - } else if ((y ^= (buffer[tempPos++] << 21)) < 0) { - x = y ^ ((~0 << 7) ^ (~0 << 14) ^ (~0 << 21)); - } else if ((x = y ^ ((long) buffer[tempPos++] << 28)) >= 0L) { - x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28); - } else if ((x ^= ((long) buffer[tempPos++] << 35)) < 0L) { - x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35); - } else if ((x ^= ((long) buffer[tempPos++] << 42)) >= 0L) { - x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35) ^ (~0L << 42); - } else if ((x ^= ((long) buffer[tempPos++] << 49)) < 0L) { - x ^= - (~0L << 7) - ^ (~0L << 14) - ^ (~0L << 21) - ^ (~0L << 28) - ^ (~0L << 35) - ^ (~0L << 42) - ^ (~0L << 49); - } else { - x ^= ((long) buffer[tempPos++] << 56); - x ^= - (~0L << 7) - ^ (~0L << 14) - ^ (~0L << 21) - ^ (~0L << 28) - ^ (~0L << 35) - ^ (~0L << 42) - ^ (~0L << 49) - ^ (~0L << 56); - if (x < 0L) { - if (buffer[tempPos++] < 0L) { - break fastpath; // Will throw malformedVarint() - } - } - } - pos = tempPos; - return x; - } - return readRawVarint64SlowPath(); - } - - @Override - long readRawVarint64SlowPath() throws IOException { - long result = 0; - for (int shift = 0; shift < 64; shift += 7) { - final byte b = readRawByte(); - result |= (long) (b & 0x7F) << shift; - if ((b & 0x80) == 0) { - return result; - } - } - throw InvalidProtocolBufferException.malformedVarint(); - } - - @Override - public int readRawLittleEndian32() throws IOException { - int tempPos = pos; - - if (bufferSize - tempPos < FIXED_32_SIZE) { - refillBuffer(FIXED_32_SIZE); - tempPos = pos; - } - - final byte[] buffer = this.buffer; - pos = tempPos + FIXED_32_SIZE; - return (((buffer[tempPos] & 0xff)) - | ((buffer[tempPos + 1] & 0xff) << 8) - | ((buffer[tempPos + 2] & 0xff) << 16) - | ((buffer[tempPos + 3] & 0xff) << 24)); - } - - @Override - public long readRawLittleEndian64() throws IOException { - int tempPos = pos; - - if (bufferSize - tempPos < FIXED_64_SIZE) { - refillBuffer(FIXED_64_SIZE); - tempPos = pos; - } - - final byte[] buffer = this.buffer; - pos = tempPos + FIXED_64_SIZE; - return (((buffer[tempPos] & 0xffL)) - | ((buffer[tempPos + 1] & 0xffL) << 8) - | ((buffer[tempPos + 2] & 0xffL) << 16) - | ((buffer[tempPos + 3] & 0xffL) << 24) - | ((buffer[tempPos + 4] & 0xffL) << 32) - | ((buffer[tempPos + 5] & 0xffL) << 40) - | ((buffer[tempPos + 6] & 0xffL) << 48) - | ((buffer[tempPos + 7] & 0xffL) << 56)); - } - - // ----------------------------------------------------------------- - - @Override - public void enableAliasing(boolean enabled) { - // TODO(nathanmittler): Ideally we should throw here. Do nothing for backward compatibility. 
- } - - @Override - public void resetSizeCounter() { - totalBytesRetired = -pos; - } - - @Override - public int pushLimit(int byteLimit) throws InvalidProtocolBufferException { - if (byteLimit < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - byteLimit += totalBytesRetired + pos; - final int oldLimit = currentLimit; - if (byteLimit > oldLimit) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - currentLimit = byteLimit; - - recomputeBufferSizeAfterLimit(); - - return oldLimit; - } - - private void recomputeBufferSizeAfterLimit() { - bufferSize += bufferSizeAfterLimit; - final int bufferEnd = totalBytesRetired + bufferSize; - if (bufferEnd > currentLimit) { - // Limit is in current buffer. - bufferSizeAfterLimit = bufferEnd - currentLimit; - bufferSize -= bufferSizeAfterLimit; - } else { - bufferSizeAfterLimit = 0; - } - } - - @Override - public void popLimit(final int oldLimit) { - currentLimit = oldLimit; - recomputeBufferSizeAfterLimit(); - } - - @Override - public int getBytesUntilLimit() { - if (currentLimit == Integer.MAX_VALUE) { - return -1; - } - - final int currentAbsolutePosition = totalBytesRetired + pos; - return currentLimit - currentAbsolutePosition; - } - - @Override - public boolean isAtEnd() throws IOException { - return pos == bufferSize && !tryRefillBuffer(1); - } - - @Override - public int getTotalBytesRead() { - return totalBytesRetired + pos; - } - - private interface RefillCallback { - void onRefill(); - } - - private RefillCallback refillCallback = null; - - /** - * Reads more bytes from the input, making at least {@code n} bytes available in the buffer. - * Caller must ensure that the requested space is not yet available, and that the requested - * space is less than BUFFER_SIZE. - * - * @throws InvalidProtocolBufferException The end of the stream or the current limit was - * reached. - */ - private void refillBuffer(int n) throws IOException { - if (!tryRefillBuffer(n)) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - } - - /** - * Tries to read more bytes from the input, making at least {@code n} bytes available in the - * buffer. Caller must ensure that the requested space is not yet available, and that the - * requested space is less than BUFFER_SIZE. - * - * @return {@code true} if the bytes could be made available; {@code false} if the end of the - * stream or the current limit was reached. - */ - private boolean tryRefillBuffer(int n) throws IOException { - if (pos + n <= bufferSize) { - throw new IllegalStateException( - "refillBuffer() called when " + n + " bytes were already available in buffer"); - } - - if (totalBytesRetired + pos + n > currentLimit) { - // Oops, we hit a limit. 
- return false; - } - - if (refillCallback != null) { - refillCallback.onRefill(); - } - - int tempPos = pos; - if (tempPos > 0) { - if (bufferSize > tempPos) { - System.arraycopy(buffer, tempPos, buffer, 0, bufferSize - tempPos); - } - totalBytesRetired += tempPos; - bufferSize -= tempPos; - pos = 0; - } - - int bytesRead = input.read(buffer, bufferSize, buffer.length - bufferSize); - if (bytesRead == 0 || bytesRead < -1 || bytesRead > buffer.length) { - throw new IllegalStateException( - "InputStream#read(byte[]) returned invalid result: " - + bytesRead - + "\nThe InputStream implementation is buggy."); - } - if (bytesRead > 0) { - bufferSize += bytesRead; - // Integer-overflow-conscious check against sizeLimit - if (totalBytesRetired + n - sizeLimit > 0) { - throw InvalidProtocolBufferException.sizeLimitExceeded(); - } - recomputeBufferSizeAfterLimit(); - return (bufferSize >= n) ? true : tryRefillBuffer(n); - } - - return false; - } - - @Override - public byte readRawByte() throws IOException { - if (pos == bufferSize) { - refillBuffer(1); - } - return buffer[pos++]; - } - - @Override - public byte[] readRawBytes(final int size) throws IOException { - final int tempPos = pos; - if (size <= (bufferSize - tempPos) && size > 0) { - pos = tempPos + size; - return Arrays.copyOfRange(buffer, tempPos, tempPos + size); - } else { - return readRawBytesSlowPath(size); - } - } - - /** - * Exactly like readRawBytes, but caller must have already checked the fast path: (size <= - * (bufferSize - pos) && size > 0) - */ - private byte[] readRawBytesSlowPath(final int size) throws IOException { - if (size == 0) { - return Internal.EMPTY_BYTE_ARRAY; - } - if (size < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - - // Integer-overflow-conscious check that the message size so far has not exceeded sizeLimit. - int currentMessageSize = totalBytesRetired + pos + size; - if (currentMessageSize - sizeLimit > 0) { - throw InvalidProtocolBufferException.sizeLimitExceeded(); - } - - // Verify that the message size so far has not exceeded currentLimit. - if (currentMessageSize > currentLimit) { - // Read to the end of the stream anyway. - skipRawBytes(currentLimit - totalBytesRetired - pos); - throw InvalidProtocolBufferException.truncatedMessage(); - } - - final int originalBufferPos = pos; - final int bufferedBytes = bufferSize - pos; - - // Mark the current buffer consumed. - totalBytesRetired += bufferSize; - pos = 0; - bufferSize = 0; - - // Determine the number of bytes we need to read from the input stream. - int sizeLeft = size - bufferedBytes; - // TODO(nathanmittler): Consider using a value larger than DEFAULT_BUFFER_SIZE. - if (sizeLeft < DEFAULT_BUFFER_SIZE || sizeLeft <= input.available()) { - // Either the bytes we need are known to be available, or the required buffer is - // within an allowed threshold - go ahead and allocate the buffer now. - final byte[] bytes = new byte[size]; - - // Copy all of the buffered bytes to the result buffer. - System.arraycopy(buffer, originalBufferPos, bytes, 0, bufferedBytes); - - // Fill the remaining bytes from the input stream. - int tempPos = bufferedBytes; - while (tempPos < bytes.length) { - int n = input.read(bytes, tempPos, size - tempPos); - if (n == -1) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - totalBytesRetired += n; - tempPos += n; - } - - return bytes; - } - - // The size is very large. For security reasons, we can't allocate the - // entire byte array yet. 
The size comes directly from the input, so a - // maliciously-crafted message could provide a bogus very large size in - // order to trick the app into allocating a lot of memory. We avoid this - // by allocating and reading only a small chunk at a time, so that the - // malicious message must actually *be* extremely large to cause - // problems. Meanwhile, we limit the allowed size of a message elsewhere. - final List chunks = new ArrayList(); - - while (sizeLeft > 0) { - // TODO(nathanmittler): Consider using a value larger than DEFAULT_BUFFER_SIZE. - final byte[] chunk = new byte[Math.min(sizeLeft, DEFAULT_BUFFER_SIZE)]; - int tempPos = 0; - while (tempPos < chunk.length) { - final int n = input.read(chunk, tempPos, chunk.length - tempPos); - if (n == -1) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - totalBytesRetired += n; - tempPos += n; - } - sizeLeft -= chunk.length; - chunks.add(chunk); - } - - // OK, got everything. Now concatenate it all into one buffer. - final byte[] bytes = new byte[size]; - - // Start by copying the leftover bytes from this.buffer. - System.arraycopy(buffer, originalBufferPos, bytes, 0, bufferedBytes); - - // And now all the chunks. - int tempPos = bufferedBytes; - for (final byte[] chunk : chunks) { - System.arraycopy(chunk, 0, bytes, tempPos, chunk.length); - tempPos += chunk.length; - } - - // Done. - return bytes; - } - - @Override - public void skipRawBytes(final int size) throws IOException { - if (size <= (bufferSize - pos) && size >= 0) { - // We have all the bytes we need already. - pos += size; - } else { - skipRawBytesSlowPath(size); - } - } - - /** - * Exactly like skipRawBytes, but caller must have already checked the fast path: (size <= - * (bufferSize - pos) && size >= 0) - */ - private void skipRawBytesSlowPath(final int size) throws IOException { - if (size < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - - if (totalBytesRetired + pos + size > currentLimit) { - // Read to the end of the stream anyway. - skipRawBytes(currentLimit - totalBytesRetired - pos); - // Then fail. - throw InvalidProtocolBufferException.truncatedMessage(); - } - - // Skipping more bytes than are in the buffer. First skip what we have. - int tempPos = bufferSize - pos; - pos = bufferSize; - - // Keep refilling the buffer until we get to the point we wanted to skip to. - // This has the side effect of ensuring the limits are updated correctly. - refillBuffer(1); - while (size - tempPos > bufferSize) { - tempPos += bufferSize; - pos = bufferSize; - refillBuffer(1); - } - - pos = size - tempPos; - } - } - - private static final class ByteInputDecoder extends CodedInputStream { - - private final ByteInput buffer; - private final boolean immutable; - private int limit; - private int bufferSizeAfterLimit; - private int pos; - private int startPos; - private int lastTag; - private boolean enableAliasing; - - /** The absolute position of the end of the current message. 
*/ - private int currentLimit = Integer.MAX_VALUE; - - private ByteInputDecoder(ByteInput buffer, boolean immutable) { - this(buffer, 0, buffer.size(), immutable); - } - - private ByteInputDecoder(ByteInput buffer, int off, int len, boolean immutable) { - this.buffer = buffer; - pos = off; - limit = off + len; - startPos = pos; - this.immutable = immutable; - } - - @Override - public int readTag() throws IOException { - if (isAtEnd()) { - lastTag = 0; - return 0; - } - - lastTag = readRawVarint32(); - if (WireFormat.getTagFieldNumber(lastTag) == 0) { - // If we actually read zero (or any tag number corresponding to field - // number zero), that's not a valid tag. - throw InvalidProtocolBufferException.invalidTag(); - } - return lastTag; - } - - @Override - public void checkLastTagWas(int value) throws InvalidProtocolBufferException { - if (lastTag != value) { - throw InvalidProtocolBufferException.invalidEndTag(); - } - } - - @Override - public int getLastTag() { - return lastTag; - } - - @Override - public boolean skipField(int tag) throws IOException { - switch (WireFormat.getTagWireType(tag)) { - case WireFormat.WIRETYPE_VARINT: - skipRawVarint(); - return true; - case WireFormat.WIRETYPE_FIXED64: - skipRawBytes(FIXED_64_SIZE); - return true; - case WireFormat.WIRETYPE_LENGTH_DELIMITED: - skipRawBytes(readRawVarint32()); - return true; - case WireFormat.WIRETYPE_START_GROUP: - skipMessage(); - checkLastTagWas( - WireFormat.makeTag(WireFormat.getTagFieldNumber(tag), WireFormat.WIRETYPE_END_GROUP)); - return true; - case WireFormat.WIRETYPE_END_GROUP: - return false; - case WireFormat.WIRETYPE_FIXED32: - skipRawBytes(FIXED_32_SIZE); - return true; - default: - throw InvalidProtocolBufferException.invalidWireType(); - } - } - - @Override - public boolean skipField(int tag, CodedOutputStream output) throws IOException { - switch (WireFormat.getTagWireType(tag)) { - case WireFormat.WIRETYPE_VARINT: - { - long value = readInt64(); - output.writeRawVarint32(tag); - output.writeUInt64NoTag(value); - return true; - } - case WireFormat.WIRETYPE_FIXED64: - { - long value = readRawLittleEndian64(); - output.writeRawVarint32(tag); - output.writeFixed64NoTag(value); - return true; - } - case WireFormat.WIRETYPE_LENGTH_DELIMITED: - { - ByteString value = readBytes(); - output.writeRawVarint32(tag); - output.writeBytesNoTag(value); - return true; - } - case WireFormat.WIRETYPE_START_GROUP: - { - output.writeRawVarint32(tag); - skipMessage(output); - int endtag = - WireFormat.makeTag( - WireFormat.getTagFieldNumber(tag), WireFormat.WIRETYPE_END_GROUP); - checkLastTagWas(endtag); - output.writeRawVarint32(endtag); - return true; - } - case WireFormat.WIRETYPE_END_GROUP: - { - return false; - } - case WireFormat.WIRETYPE_FIXED32: - { - int value = readRawLittleEndian32(); - output.writeRawVarint32(tag); - output.writeFixed32NoTag(value); - return true; - } - default: - throw InvalidProtocolBufferException.invalidWireType(); - } - } - - @Override - public void skipMessage() throws IOException { - while (true) { - final int tag = readTag(); - if (tag == 0 || !skipField(tag)) { - return; - } - } - } - - @Override - public void skipMessage(CodedOutputStream output) throws IOException { - while (true) { - final int tag = readTag(); - if (tag == 0 || !skipField(tag, output)) { - return; - } - } - } - - public double readDouble() throws IOException { - return Double.longBitsToDouble(readRawLittleEndian64()); - } - - @Override - public float readFloat() throws IOException { - return 
Float.intBitsToFloat(readRawLittleEndian32()); - } - - @Override - public long readUInt64() throws IOException { - return readRawVarint64(); - } - - @Override - public long readInt64() throws IOException { - return readRawVarint64(); - } - - @Override - public int readInt32() throws IOException { - return readRawVarint32(); - } - - @Override - public long readFixed64() throws IOException { - return readRawLittleEndian64(); - } - - @Override - public int readFixed32() throws IOException { - return readRawLittleEndian32(); - } - - @Override - public boolean readBool() throws IOException { - return readRawVarint64() != 0; - } - - @Override - public String readString() throws IOException { - final int size = readRawVarint32(); - if (size > 0 && size <= remaining()) { - byte[] bytes = copyToArray(pos, size); - pos += size; - return new String(bytes, UTF_8); - } - - if (size == 0) { - return ""; - } - if (size < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - - @Override - public String readStringRequireUtf8() throws IOException { - final int size = readRawVarint32(); - if (size > 0 && size <= remaining()) { - if (!Utf8.isValidUtf8(buffer, pos, pos + size)) { - throw InvalidProtocolBufferException.invalidUtf8(); - } - byte[] bytes = copyToArray(pos, size); - pos += size; - return new String(bytes, UTF_8); - } - - if (size == 0) { - return ""; - } - if (size <= 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - - @Override - public void readGroup(int fieldNumber, MessageLite.Builder builder, - ExtensionRegistryLite extensionRegistry) throws IOException { - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - ++recursionDepth; - builder.mergeFrom(this, extensionRegistry); - checkLastTagWas(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP)); - --recursionDepth; - } - - @Override - public T readGroup(int fieldNumber, Parser parser, - ExtensionRegistryLite extensionRegistry) throws IOException { - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - ++recursionDepth; - T result = parser.parsePartialFrom(this, extensionRegistry); - checkLastTagWas(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP)); - --recursionDepth; - return result; - } - - @Deprecated - @Override - public void readUnknownGroup(int fieldNumber, MessageLite.Builder builder) throws IOException { - readGroup(fieldNumber, builder, ExtensionRegistryLite.getEmptyRegistry()); - } - - @Override - public void readMessage(MessageLite.Builder builder, ExtensionRegistryLite extensionRegistry) - throws IOException { - final int length = readRawVarint32(); - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - final int oldLimit = pushLimit(length); - ++recursionDepth; - builder.mergeFrom(this, extensionRegistry); - checkLastTagWas(0); - --recursionDepth; - popLimit(oldLimit); - } - - @Override - public T readMessage(Parser parser, - ExtensionRegistryLite extensionRegistry) throws IOException { - int length = readRawVarint32(); - if (recursionDepth >= recursionLimit) { - throw InvalidProtocolBufferException.recursionLimitExceeded(); - } - final int oldLimit = pushLimit(length); - ++recursionDepth; - T result = parser.parsePartialFrom(this, extensionRegistry); - checkLastTagWas(0); - 
--recursionDepth; - popLimit(oldLimit); - return result; - } - - @Override - public ByteString readBytes() throws IOException { - final int size = readRawVarint32(); - if (size > 0 && size <= (limit - pos)) { - // Fast path: We already have the bytes in a contiguous buffer, so - // just copy directly from it. - - final ByteString result = - immutable && enableAliasing - ? ByteString.wrap(buffer, pos, size) - : ByteString.wrap(copyToArray(pos, size)); - pos += size; - return result; - } - if (size == 0) { - return ByteString.EMPTY; - } - // Slow path: Build a byte array first then copy it. - return ByteString.wrap(readRawBytes(size)); - } - - @Override - public byte[] readByteArray() throws IOException { - return readRawBytes(readRawVarint32()); - } - - @Override - public ByteBuffer readByteBuffer() throws IOException { - return ByteBuffer.wrap(readByteArray()); - } - - @Override - public int readUInt32() throws IOException { - return readRawVarint32(); - } - - @Override - public int readEnum() throws IOException { - return readRawVarint32(); - } - - @Override - public int readSFixed32() throws IOException { - return readRawLittleEndian32(); - } - - @Override - public long readSFixed64() throws IOException { - return readRawLittleEndian64(); - } - - @Override - public int readSInt32() throws IOException { - return decodeZigZag32(readRawVarint32()); - } - - @Override - public long readSInt64() throws IOException { - return decodeZigZag64(readRawVarint64()); - } - - @Override - public int readRawVarint32() throws IOException { - // See implementation notes for readRawVarint64 - fastpath: - { - int tempPos = pos; - - if (limit == tempPos) { - break fastpath; - } - - int x; - if ((x = buffer.read(tempPos++)) >= 0) { - pos = tempPos; - return x; - } else if (limit - tempPos < 9) { - break fastpath; - } else if ((x ^= (buffer.read(tempPos++) << 7)) < 0) { - x ^= (~0 << 7); - } else if ((x ^= (buffer.read(tempPos++) << 14)) >= 0) { - x ^= (~0 << 7) ^ (~0 << 14); - } else if ((x ^= (buffer.read(tempPos++) << 21)) < 0) { - x ^= (~0 << 7) ^ (~0 << 14) ^ (~0 << 21); - } else { - int y = buffer.read(tempPos++); - x ^= y << 28; - x ^= (~0 << 7) ^ (~0 << 14) ^ (~0 << 21) ^ (~0 << 28); - if (y < 0 - && buffer.read(tempPos++) < 0 - && buffer.read(tempPos++) < 0 - && buffer.read(tempPos++) < 0 - && buffer.read(tempPos++) < 0 - && buffer.read(tempPos++) < 0) { - break fastpath; // Will throw malformedVarint() - } - } - pos = tempPos; - return x; - } - return (int) readRawVarint64SlowPath(); - } - - @Override - public long readRawVarint64() throws IOException { - fastpath: - { - int tempPos = pos; - - if (limit == tempPos) { - break fastpath; - } - - long x; - int y; - if ((y = buffer.read(tempPos++)) >= 0) { - pos = tempPos; - return y; - } else if (limit - tempPos < 9) { - break fastpath; - } else if ((y ^= (buffer.read(tempPos++) << 7)) < 0) { - x = y ^ (~0 << 7); - } else if ((y ^= (buffer.read(tempPos++) << 14)) >= 0) { - x = y ^ ((~0 << 7) ^ (~0 << 14)); - } else if ((y ^= (buffer.read(tempPos++) << 21)) < 0) { - x = y ^ ((~0 << 7) ^ (~0 << 14) ^ (~0 << 21)); - } else if ((x = y ^ ((long) buffer.read(tempPos++) << 28)) >= 0L) { - x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28); - } else if ((x ^= ((long) buffer.read(tempPos++) << 35)) < 0L) { - x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35); - } else if ((x ^= ((long) buffer.read(tempPos++) << 42)) >= 0L) { - x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35) ^ (~0L << 42); - } else if ((x ^= 
((long) buffer.read(tempPos++) << 49)) < 0L) { - x ^= - (~0L << 7) - ^ (~0L << 14) - ^ (~0L << 21) - ^ (~0L << 28) - ^ (~0L << 35) - ^ (~0L << 42) - ^ (~0L << 49); - } else { - x ^= ((long) buffer.read(tempPos++) << 56); - x ^= - (~0L << 7) - ^ (~0L << 14) - ^ (~0L << 21) - ^ (~0L << 28) - ^ (~0L << 35) - ^ (~0L << 42) - ^ (~0L << 49) - ^ (~0L << 56); - if (x < 0L) { - if (buffer.read(tempPos++) < 0L) { - break fastpath; // Will throw malformedVarint() - } - } - } - pos = tempPos; - return x; - } - return readRawVarint64SlowPath(); - } - - @Override - long readRawVarint64SlowPath() throws IOException { - long result = 0; - for (int shift = 0; shift < 64; shift += 7) { - final byte b = readRawByte(); - result |= (long) (b & 0x7F) << shift; - if ((b & 0x80) == 0) { - return result; - } - } - throw InvalidProtocolBufferException.malformedVarint(); - } - - @Override - public int readRawLittleEndian32() throws IOException { - int tempPos = pos; - - if (limit - tempPos < FIXED_32_SIZE) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - - pos = tempPos + FIXED_32_SIZE; - return (((buffer.read(tempPos) & 0xff)) - | ((buffer.read(tempPos + 1) & 0xff) << 8) - | ((buffer.read(tempPos + 2) & 0xff) << 16) - | ((buffer.read(tempPos + 3) & 0xff) << 24)); - } - - @Override - public long readRawLittleEndian64() throws IOException { - int tempPos = pos; - - if (limit - tempPos < FIXED_64_SIZE) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - - pos = tempPos + FIXED_64_SIZE; - return (((buffer.read(tempPos) & 0xffL)) - | ((buffer.read(tempPos + 1) & 0xffL) << 8) - | ((buffer.read(tempPos + 2) & 0xffL) << 16) - | ((buffer.read(tempPos + 3) & 0xffL) << 24) - | ((buffer.read(tempPos + 4) & 0xffL) << 32) - | ((buffer.read(tempPos + 5) & 0xffL) << 40) - | ((buffer.read(tempPos + 6) & 0xffL) << 48) - | ((buffer.read(tempPos + 7) & 0xffL) << 56)); - } - - @Override - public void enableAliasing(boolean enabled) { - this.enableAliasing = enabled; - } - - @Override - public void resetSizeCounter() { - startPos = pos; - } - - @Override - public int pushLimit(int byteLimit) throws InvalidProtocolBufferException { - if (byteLimit < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - byteLimit += getTotalBytesRead(); - final int oldLimit = currentLimit; - if (byteLimit > oldLimit) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - currentLimit = byteLimit; - - recomputeBufferSizeAfterLimit(); - - return oldLimit; - } - - @Override - public void popLimit(int oldLimit) { - currentLimit = oldLimit; - recomputeBufferSizeAfterLimit(); - } - - @Override - public int getBytesUntilLimit() { - if (currentLimit == Integer.MAX_VALUE) { - return -1; - } - - return currentLimit - getTotalBytesRead(); - } - - @Override - public boolean isAtEnd() throws IOException { - return pos == limit; - } - - @Override - public int getTotalBytesRead() { - return pos - startPos; - } - - @Override - public byte readRawByte() throws IOException { - if (pos == limit) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - return buffer.read(pos++); - } - - @Override - public byte[] readRawBytes(int length) throws IOException { - if (length > 0 && length <= (limit - pos)) { - byte[] bytes = copyToArray(pos, length); - pos += length; - return bytes; - } - - if (length <= 0) { - if (length == 0) { - return Internal.EMPTY_BYTE_ARRAY; - } else { - throw InvalidProtocolBufferException.negativeSize(); - } - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - - @Override 
- public void skipRawBytes(int length) throws IOException { - if (length >= 0 && length <= (limit - pos)) { - // We have all the bytes we need already. - pos += length; - return; - } - - if (length < 0) { - throw InvalidProtocolBufferException.negativeSize(); - } - throw InvalidProtocolBufferException.truncatedMessage(); - } - - private void recomputeBufferSizeAfterLimit() { - limit += bufferSizeAfterLimit; - final int bufferEnd = limit - startPos; - if (bufferEnd > currentLimit) { - // Limit is in current buffer. - bufferSizeAfterLimit = bufferEnd - currentLimit; - limit -= bufferSizeAfterLimit; - } else { - bufferSizeAfterLimit = 0; - } - } - - private int remaining() { - return (int) (limit - pos); - } - - private byte[] copyToArray(int begin, int size) throws IOException { - try { - byte[] bytes = new byte[size]; - buffer.read(begin, bytes); - return bytes; - } catch (IOException e) { - throw InvalidProtocolBufferException.truncatedMessage(); - } - } - - private void skipRawVarint() throws IOException { - if (limit - pos >= MAX_VARINT_SIZE) { - skipRawVarintFastPath(); - } else { - skipRawVarintSlowPath(); - } - } - - private void skipRawVarintFastPath() throws IOException { - for (int i = 0; i < MAX_VARINT_SIZE; i++) { - if (buffer.read(pos++) >= 0) { - return; - } - } - throw InvalidProtocolBufferException.malformedVarint(); - } - - private void skipRawVarintSlowPath() throws IOException { - for (int i = 0; i < MAX_VARINT_SIZE; i++) { - if (readRawByte() >= 0) { - return; - } - } - throw InvalidProtocolBufferException.malformedVarint(); - } - } -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedOutputStream.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedOutputStream.java deleted file mode 100644 index 03871c93885..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedOutputStream.java +++ /dev/null @@ -1,3001 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -import static org.apache.hadoop.hbase.shaded.com.google.protobuf.WireFormat.FIXED_32_SIZE; -import static org.apache.hadoop.hbase.shaded.com.google.protobuf.WireFormat.FIXED_64_SIZE; -import static org.apache.hadoop.hbase.shaded.com.google.protobuf.WireFormat.MAX_VARINT_SIZE; -import static java.lang.Math.max; - -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Utf8.UnpairedSurrogateException; -import java.io.IOException; -import java.io.OutputStream; -import java.nio.BufferOverflowException; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.util.logging.Level; -import java.util.logging.Logger; - -/** - * Encodes and writes protocol message fields. - * - *
<p>
This class contains two kinds of methods: methods that write specific - * protocol message constructs and field types (e.g. {@link #writeTag} and - * {@link #writeInt32}) and methods that write low-level values (e.g. - * {@link #writeRawVarint32} and {@link #writeRawBytes}). If you are - * writing encoded protocol messages, you should use the former methods, but if - * you are writing some other format of your own design, use the latter. - * - *
<p>
This class is totally unsynchronized. - */ -public abstract class CodedOutputStream extends ByteOutput { - private static final Logger logger = Logger.getLogger(CodedOutputStream.class.getName()); - private static final boolean HAS_UNSAFE_ARRAY_OPERATIONS = UnsafeUtil.hasUnsafeArrayOperations(); - private static final long ARRAY_BASE_OFFSET = UnsafeUtil.getArrayBaseOffset(); - - /** - * @deprecated Use {@link #computeFixed32SizeNoTag(int)} instead. - */ - @Deprecated - public static final int LITTLE_ENDIAN_32_SIZE = FIXED_32_SIZE; - - /** - * The buffer size used in {@link #newInstance(OutputStream)}. - */ - public static final int DEFAULT_BUFFER_SIZE = 4096; - - /** - * Returns the buffer size to efficiently write dataLength bytes to this - * CodedOutputStream. Used by AbstractMessageLite. - * - * @return the buffer size to efficiently write dataLength bytes to this - * CodedOutputStream. - */ - static int computePreferredBufferSize(int dataLength) { - if (dataLength > DEFAULT_BUFFER_SIZE) { - return DEFAULT_BUFFER_SIZE; - } - return dataLength; - } - - /** - * Create a new {@code CodedOutputStream} wrapping the given {@code OutputStream}. - * - *
<p>
NOTE: The provided {@link OutputStream} MUST NOT retain access or - * modify the provided byte arrays. Doing so may result in corrupted data, which would be - * difficult to debug. - */ - public static CodedOutputStream newInstance(final OutputStream output) { - return newInstance(output, DEFAULT_BUFFER_SIZE); - } - - /** - * Create a new {@code CodedOutputStream} wrapping the given {@code OutputStream} with a given - * buffer size. - * - *
<p>
NOTE: The provided {@link OutputStream} MUST NOT retain access or - * modify the provided byte arrays. Doing so may result in corrupted data, which would be - * difficult to debug. - */ - public static CodedOutputStream newInstance(final OutputStream output, final int bufferSize) { - return new OutputStreamEncoder(output, bufferSize); - } - - /** - * Create a new {@code CodedOutputStream} that writes directly to the given - * byte array. If more bytes are written than fit in the array, - * {@link OutOfSpaceException} will be thrown. Writing directly to a flat - * array is faster than writing to an {@code OutputStream}. See also - * {@link ByteString#newCodedBuilder}. - */ - public static CodedOutputStream newInstance(final byte[] flatArray) { - return newInstance(flatArray, 0, flatArray.length); - } - - /** - * Create a new {@code CodedOutputStream} that writes directly to the given - * byte array slice. If more bytes are written than fit in the slice, - * {@link OutOfSpaceException} will be thrown. Writing directly to a flat - * array is faster than writing to an {@code OutputStream}. See also - * {@link ByteString#newCodedBuilder}. - */ - public static CodedOutputStream newInstance( - final byte[] flatArray, final int offset, final int length) { - return new ArrayEncoder(flatArray, offset, length); - } - - /** Create a new {@code CodedOutputStream} that writes to the given {@link ByteBuffer}. */ - public static CodedOutputStream newInstance(ByteBuffer buffer) { - if (buffer.hasArray()) { - return new HeapNioEncoder(buffer); - } - if (buffer.isDirect() && !buffer.isReadOnly()) { - return UnsafeDirectNioEncoder.isSupported() - ? newUnsafeInstance(buffer) - : newSafeInstance(buffer); - } - throw new IllegalArgumentException("ByteBuffer is read-only"); - } - - /** For testing purposes only. */ - static CodedOutputStream newUnsafeInstance(ByteBuffer buffer) { - return new UnsafeDirectNioEncoder(buffer); - } - - /** For testing purposes only. */ - static CodedOutputStream newSafeInstance(ByteBuffer buffer) { - return new SafeDirectNioEncoder(buffer); - } - - /** - * Configures serialization to be deterministic. - * - *
<p>
The deterministic serialization guarantees that for a given binary, equal (defined by the - * {@code equals()} methods in protos) messages will always be serialized to the same bytes. This - * implies: - * - *
<ul>
    - *
  • repeated serialization of a message will return the same bytes - *
  • different processes of the same binary (which may be executing on different machines) will - * serialize equal messages to the same bytes. - *
- * - *
<p>
Note the deterministic serialization is NOT canonical across languages; it is also unstable - * across different builds with schema changes due to unknown fields. Users who need canonical - * serialization, e.g. persistent storage in a canonical form, fingerprinting, etc, should define - * their own canonicalization specification and implement the serializer using reflection APIs - * rather than relying on this API. - * - *
<p>
Once set, the serializer will: (Note this is an implementation detail and may subject to - * change in the future) - * - *
<ul>
    - *
  • sort map entries by keys in lexicographical order or numerical order. Note: For string - * keys, the order is based on comparing the Unicode value of each character in the strings. - * The order may be different from the deterministic serialization in other languages where - * maps are sorted on the lexicographical order of the UTF8 encoded keys. - *
- */ - void useDeterministicSerialization() { - serializationDeterministic = true; - } - - boolean isSerializationDeterministic() { - return serializationDeterministic; - } - private boolean serializationDeterministic; - - /** - * Create a new {@code CodedOutputStream} that writes to the given {@link ByteBuffer}. - * - * @deprecated the size parameter is no longer used since use of an internal buffer is useless - * (and wasteful) when writing to a {@link ByteBuffer}. Use {@link #newInstance(ByteBuffer)} - * instead. - */ - @Deprecated - public static CodedOutputStream newInstance(ByteBuffer byteBuffer, - @SuppressWarnings("unused") int unused) { - return newInstance(byteBuffer); - } - - /** - * Create a new {@code CodedOutputStream} that writes to the provided {@link ByteOutput}. - * - *
<p>
NOTE: The {@link ByteOutput} MUST NOT modify the provided buffers. Doing - * so may result in corrupted data, which would be difficult to debug. - * - * @param byteOutput the output target for encoded bytes. - * @param bufferSize the size of the internal scratch buffer to be used for string encoding. - * Setting this to {@code 0} will disable buffering, requiring an allocation for each encoded - * string. - */ - static CodedOutputStream newInstance(ByteOutput byteOutput, int bufferSize) { - if (bufferSize < 0) { - throw new IllegalArgumentException("bufferSize must be positive"); - } - - return new ByteOutputEncoder(byteOutput, bufferSize); - } - - // Disallow construction outside of this class. - private CodedOutputStream() { - } - - // ----------------------------------------------------------------- - - /** Encode and write a tag. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeTag(int fieldNumber, int wireType) throws IOException; - - /** Write an {@code int32} field, including tag, to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeInt32(int fieldNumber, int value) throws IOException; - - /** Write a {@code uint32} field, including tag, to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeUInt32(int fieldNumber, int value) throws IOException; - - /** Write a {@code sint32} field, including tag, to the stream. */ - public final void writeSInt32(final int fieldNumber, final int value) throws IOException { - writeUInt32(fieldNumber, encodeZigZag32(value)); - } - - /** Write a {@code fixed32} field, including tag, to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeFixed32(int fieldNumber, int value) throws IOException; - - /** Write an {@code sfixed32} field, including tag, to the stream. */ - public final void writeSFixed32(final int fieldNumber, final int value) throws IOException { - writeFixed32(fieldNumber, value); - } - - /** Write an {@code int64} field, including tag, to the stream. */ - public final void writeInt64(final int fieldNumber, final long value) throws IOException { - writeUInt64(fieldNumber, value); - } - - /** Write a {@code uint64} field, including tag, to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeUInt64(int fieldNumber, long value) throws IOException; - - /** Write an {@code sint64} field, including tag, to the stream. */ - public final void writeSInt64(final int fieldNumber, final long value) throws IOException { - writeUInt64(fieldNumber, encodeZigZag64(value)); - } - - /** Write a {@code fixed64} field, including tag, to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeFixed64(int fieldNumber, long value) throws IOException; - - /** Write an {@code sfixed64} field, including tag, to the stream. */ - public final void writeSFixed64(final int fieldNumber, final long value) throws IOException { - writeFixed64(fieldNumber, value); - } - - /** Write a {@code float} field, including tag, to the stream. */ - public final void writeFloat(final int fieldNumber, final float value) throws IOException { - writeFixed32(fieldNumber, Float.floatToRawIntBits(value)); - } - - /** Write a {@code double} field, including tag, to the stream. 
*/ - public final void writeDouble(final int fieldNumber, final double value) throws IOException { - writeFixed64(fieldNumber, Double.doubleToRawLongBits(value)); - } - - /** Write a {@code bool} field, including tag, to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeBool(int fieldNumber, boolean value) throws IOException; - - /** - * Write an enum field, including tag, to the stream. The provided value is the numeric - * value used to represent the enum value on the wire (not the enum ordinal value). - */ - public final void writeEnum(final int fieldNumber, final int value) throws IOException { - writeInt32(fieldNumber, value); - } - - /** Write a {@code string} field, including tag, to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeString(int fieldNumber, String value) throws IOException; - - /** Write a {@code bytes} field, including tag, to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeBytes(int fieldNumber, ByteString value) throws IOException; - - /** Write a {@code bytes} field, including tag, to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeByteArray(int fieldNumber, byte[] value) throws IOException; - - /** Write a {@code bytes} field, including tag, to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeByteArray(int fieldNumber, byte[] value, int offset, int length) - throws IOException; - - /** - * Write a {@code bytes} field, including tag, to the stream. - * This method will write all content of the ByteBuffer regardless of the - * current position and limit (i.e., the number of bytes to be written is - * value.capacity(), not value.remaining()). Furthermore, this method doesn't - * alter the state of the passed-in ByteBuffer. Its position, limit, mark, - * etc. will remain unchanged. If you only want to write the remaining bytes - * of a ByteBuffer, you can call - * {@code writeByteBuffer(fieldNumber, byteBuffer.slice())}. - */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeByteBuffer(int fieldNumber, ByteBuffer value) throws IOException; - - /** - * Write a single byte. - */ - public final void writeRawByte(final byte value) throws IOException { - write(value); - } - - /** Write a single byte, represented by an integer value. */ - public final void writeRawByte(final int value) throws IOException { - write((byte) value); - } - - /** Write an array of bytes. */ - public final void writeRawBytes(final byte[] value) throws IOException { - write(value, 0, value.length); - } - - /** - * Write part of an array of bytes. - */ - public final void writeRawBytes(final byte[] value, int offset, int length) throws IOException { - write(value, offset, length); - } - - /** Write a byte string. */ - public final void writeRawBytes(final ByteString value) throws IOException { - value.writeTo(this); - } - - /** - * Write a ByteBuffer. This method will write all content of the ByteBuffer - * regardless of the current position and limit (i.e., the number of bytes - * to be written is value.capacity(), not value.remaining()). Furthermore, - * this method doesn't alter the state of the passed-in ByteBuffer. Its - * position, limit, mark, etc. will remain unchanged. 
If you only want to - * write the remaining bytes of a ByteBuffer, you can call - * {@code writeRawBytes(byteBuffer.slice())}. - */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeRawBytes(final ByteBuffer value) throws IOException; - - /** Write an embedded message field, including tag, to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeMessage(final int fieldNumber, final MessageLite value) - throws IOException; - - /** - * Write a MessageSet extension field to the stream. For historical reasons, - * the wire format differs from normal fields. - */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeMessageSetExtension(final int fieldNumber, final MessageLite value) - throws IOException; - - /** - * Write an unparsed MessageSet extension field to the stream. For - * historical reasons, the wire format differs from normal fields. - */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeRawMessageSetExtension(final int fieldNumber, final ByteString value) - throws IOException; - - // ----------------------------------------------------------------- - - /** Write an {@code int32} field to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeInt32NoTag(final int value) throws IOException; - - /** Write a {@code uint32} field to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeUInt32NoTag(int value) throws IOException; - - /** Write a {@code sint32} field to the stream. */ - public final void writeSInt32NoTag(final int value) throws IOException { - writeUInt32NoTag(encodeZigZag32(value)); - } - - /** Write a {@code fixed32} field to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeFixed32NoTag(int value) throws IOException; - - /** Write a {@code sfixed32} field to the stream. */ - public final void writeSFixed32NoTag(final int value) throws IOException { - writeFixed32NoTag(value); - } - - /** Write an {@code int64} field to the stream. */ - public final void writeInt64NoTag(final long value) throws IOException { - writeUInt64NoTag(value); - } - - /** Write a {@code uint64} field to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeUInt64NoTag(long value) throws IOException; - - /** Write a {@code sint64} field to the stream. */ - public final void writeSInt64NoTag(final long value) throws IOException { - writeUInt64NoTag(encodeZigZag64(value)); - } - - /** Write a {@code fixed64} field to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeFixed64NoTag(long value) throws IOException; - - /** Write a {@code sfixed64} field to the stream. */ - public final void writeSFixed64NoTag(final long value) throws IOException { - writeFixed64NoTag(value); - } - - /** Write a {@code float} field to the stream. */ - public final void writeFloatNoTag(final float value) throws IOException { - writeFixed32NoTag(Float.floatToRawIntBits(value)); - } - - /** Write a {@code double} field to the stream. */ - public final void writeDoubleNoTag(final double value) throws IOException { - writeFixed64NoTag(Double.doubleToRawLongBits(value)); - } - - /** Write a {@code bool} field to the stream. 
*/ - public final void writeBoolNoTag(final boolean value) throws IOException { - write((byte) (value ? 1 : 0)); - } - - /** - * Write an enum field to the stream. The provided value is the numeric - * value used to represent the enum value on the wire (not the enum ordinal value). - */ - public final void writeEnumNoTag(final int value) throws IOException { - writeInt32NoTag(value); - } - - /** Write a {@code string} field to the stream. */ - // TODO(dweis): Document behavior on ill-formed UTF-16 input. - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeStringNoTag(String value) throws IOException; - - /** Write a {@code bytes} field to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeBytesNoTag(final ByteString value) throws IOException; - - /** Write a {@code bytes} field to the stream. */ - public final void writeByteArrayNoTag(final byte[] value) throws IOException { - writeByteArrayNoTag(value, 0, value.length); - } - - /** Write an embedded message field to the stream. */ - // Abstract to avoid overhead of additional virtual method calls. - public abstract void writeMessageNoTag(final MessageLite value) throws IOException; - - //================================================================= - - @ExperimentalApi - @Override - public abstract void write(byte value) throws IOException; - - @ExperimentalApi - @Override - public abstract void write(byte[] value, int offset, int length) throws IOException; - - @ExperimentalApi - @Override - public abstract void writeLazy(byte[] value, int offset, int length) throws IOException; - - @Override - public abstract void write(ByteBuffer value) throws IOException; - - @ExperimentalApi - @Override - public abstract void writeLazy(ByteBuffer value) throws IOException; - - // ================================================================= - // ================================================================= - - /** - * Compute the number of bytes that would be needed to encode an - * {@code int32} field, including tag. - */ - public static int computeInt32Size(final int fieldNumber, final int value) { - return computeTagSize(fieldNumber) + computeInt32SizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code uint32} field, including tag. - */ - public static int computeUInt32Size(final int fieldNumber, final int value) { - return computeTagSize(fieldNumber) + computeUInt32SizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode an - * {@code sint32} field, including tag. - */ - public static int computeSInt32Size(final int fieldNumber, final int value) { - return computeTagSize(fieldNumber) + computeSInt32SizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code fixed32} field, including tag. - */ - public static int computeFixed32Size(final int fieldNumber, final int value) { - return computeTagSize(fieldNumber) + computeFixed32SizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode an - * {@code sfixed32} field, including tag. - */ - public static int computeSFixed32Size(final int fieldNumber, final int value) { - return computeTagSize(fieldNumber) + computeSFixed32SizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode an - * {@code int64} field, including tag. 
- */ - public static int computeInt64Size(final int fieldNumber, final long value) { - return computeTagSize(fieldNumber) + computeInt64SizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code uint64} field, including tag. - */ - public static int computeUInt64Size(final int fieldNumber, final long value) { - return computeTagSize(fieldNumber) + computeUInt64SizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode an - * {@code sint64} field, including tag. - */ - public static int computeSInt64Size(final int fieldNumber, final long value) { - return computeTagSize(fieldNumber) + computeSInt64SizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code fixed64} field, including tag. - */ - public static int computeFixed64Size(final int fieldNumber, final long value) { - return computeTagSize(fieldNumber) + computeFixed64SizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode an - * {@code sfixed64} field, including tag. - */ - public static int computeSFixed64Size(final int fieldNumber, final long value) { - return computeTagSize(fieldNumber) + computeSFixed64SizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code float} field, including tag. - */ - public static int computeFloatSize(final int fieldNumber, final float value) { - return computeTagSize(fieldNumber) + computeFloatSizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code double} field, including tag. - */ - public static int computeDoubleSize(final int fieldNumber, final double value) { - return computeTagSize(fieldNumber) + computeDoubleSizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code bool} field, including tag. - */ - public static int computeBoolSize(final int fieldNumber, final boolean value) { - return computeTagSize(fieldNumber) + computeBoolSizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode an - * enum field, including tag. The provided value is the numeric - * value used to represent the enum value on the wire (not the enum ordinal value). - */ - public static int computeEnumSize(final int fieldNumber, final int value) { - return computeTagSize(fieldNumber) + computeEnumSizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code string} field, including tag. - */ - public static int computeStringSize(final int fieldNumber, final String value) { - return computeTagSize(fieldNumber) + computeStringSizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code bytes} field, including tag. - */ - public static int computeBytesSize(final int fieldNumber, final ByteString value) { - return computeTagSize(fieldNumber) + computeBytesSizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code bytes} field, including tag. - */ - public static int computeByteArraySize(final int fieldNumber, final byte[] value) { - return computeTagSize(fieldNumber) + computeByteArraySizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code bytes} field, including tag. 
- */ - public static int computeByteBufferSize(final int fieldNumber, final ByteBuffer value) { - return computeTagSize(fieldNumber) + computeByteBufferSizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode an - * embedded message in lazy field, including tag. - */ - public static int computeLazyFieldSize(final int fieldNumber, final LazyFieldLite value) { - return computeTagSize(fieldNumber) + computeLazyFieldSizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode an - * embedded message field, including tag. - */ - public static int computeMessageSize(final int fieldNumber, final MessageLite value) { - return computeTagSize(fieldNumber) + computeMessageSizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * MessageSet extension to the stream. For historical reasons, - * the wire format differs from normal fields. - */ - public static int computeMessageSetExtensionSize(final int fieldNumber, final MessageLite value) { - return computeTagSize(WireFormat.MESSAGE_SET_ITEM) * 2 - + computeUInt32Size(WireFormat.MESSAGE_SET_TYPE_ID, fieldNumber) - + computeMessageSize(WireFormat.MESSAGE_SET_MESSAGE, value); - } - - /** - * Compute the number of bytes that would be needed to encode an - * unparsed MessageSet extension field to the stream. For - * historical reasons, the wire format differs from normal fields. - */ - public static int computeRawMessageSetExtensionSize( - final int fieldNumber, final ByteString value) { - return computeTagSize(WireFormat.MESSAGE_SET_ITEM) * 2 - + computeUInt32Size(WireFormat.MESSAGE_SET_TYPE_ID, fieldNumber) - + computeBytesSize(WireFormat.MESSAGE_SET_MESSAGE, value); - } - - /** - * Compute the number of bytes that would be needed to encode an - * lazily parsed MessageSet extension field to the stream. For - * historical reasons, the wire format differs from normal fields. - */ - public static int computeLazyFieldMessageSetExtensionSize( - final int fieldNumber, final LazyFieldLite value) { - return computeTagSize(WireFormat.MESSAGE_SET_ITEM) * 2 - + computeUInt32Size(WireFormat.MESSAGE_SET_TYPE_ID, fieldNumber) - + computeLazyFieldSize(WireFormat.MESSAGE_SET_MESSAGE, value); - } - - // ----------------------------------------------------------------- - - /** Compute the number of bytes that would be needed to encode a tag. */ - public static int computeTagSize(final int fieldNumber) { - return computeUInt32SizeNoTag(WireFormat.makeTag(fieldNumber, 0)); - } - - /** - * Compute the number of bytes that would be needed to encode an - * {@code int32} field, including tag. - */ - public static int computeInt32SizeNoTag(final int value) { - if (value >= 0) { - return computeUInt32SizeNoTag(value); - } else { - // Must sign-extend. - return MAX_VARINT_SIZE; - } - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code uint32} field. - */ - public static int computeUInt32SizeNoTag(final int value) { - if ((value & (~0 << 7)) == 0) { - return 1; - } - if ((value & (~0 << 14)) == 0) { - return 2; - } - if ((value & (~0 << 21)) == 0) { - return 3; - } - if ((value & (~0 << 28)) == 0) { - return 4; - } - return 5; - } - - /** - * Compute the number of bytes that would be needed to encode an - * {@code sint32} field. 
- */ - public static int computeSInt32SizeNoTag(final int value) { - return computeUInt32SizeNoTag(encodeZigZag32(value)); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code fixed32} field. - */ - public static int computeFixed32SizeNoTag(@SuppressWarnings("unused") final int unused) { - return FIXED_32_SIZE; - } - - /** - * Compute the number of bytes that would be needed to encode an - * {@code sfixed32} field. - */ - public static int computeSFixed32SizeNoTag(@SuppressWarnings("unused") final int unused) { - return FIXED_32_SIZE; - } - - /** - * Compute the number of bytes that would be needed to encode an - * {@code int64} field, including tag. - */ - public static int computeInt64SizeNoTag(final long value) { - return computeUInt64SizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code uint64} field, including tag. - */ - public static int computeUInt64SizeNoTag(long value) { - // handle two popular special cases up front ... - if ((value & (~0L << 7)) == 0L) { - return 1; - } - if (value < 0L) { - return 10; - } - // ... leaving us with 8 remaining, which we can divide and conquer - int n = 2; - if ((value & (~0L << 35)) != 0L) { - n += 4; value >>>= 28; - } - if ((value & (~0L << 21)) != 0L) { - n += 2; value >>>= 14; - } - if ((value & (~0L << 14)) != 0L) { - n += 1; - } - return n; - } - - /** - * Compute the number of bytes that would be needed to encode an - * {@code sint64} field. - */ - public static int computeSInt64SizeNoTag(final long value) { - return computeUInt64SizeNoTag(encodeZigZag64(value)); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code fixed64} field. - */ - public static int computeFixed64SizeNoTag(@SuppressWarnings("unused") final long unused) { - return FIXED_64_SIZE; - } - - /** - * Compute the number of bytes that would be needed to encode an - * {@code sfixed64} field. - */ - public static int computeSFixed64SizeNoTag(@SuppressWarnings("unused") final long unused) { - return FIXED_64_SIZE; - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code float} field, including tag. - */ - public static int computeFloatSizeNoTag(@SuppressWarnings("unused") final float unused) { - return FIXED_32_SIZE; - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code double} field, including tag. - */ - public static int computeDoubleSizeNoTag(@SuppressWarnings("unused") final double unused) { - return FIXED_64_SIZE; - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code bool} field. - */ - public static int computeBoolSizeNoTag(@SuppressWarnings("unused") final boolean unused) { - return 1; - } - - /** - * Compute the number of bytes that would be needed to encode an enum field. - * The provided value is the numeric value used to represent the enum value on the wire - * (not the enum ordinal value). - */ - public static int computeEnumSizeNoTag(final int value) { - return computeInt32SizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code string} field. - */ - public static int computeStringSizeNoTag(final String value) { - int length; - try { - length = Utf8.encodedLength(value); - } catch (UnpairedSurrogateException e) { - // TODO(dweis): Consider using nio Charset methods instead. 
- final byte[] bytes = value.getBytes(Internal.UTF_8); - length = bytes.length; - } - - return computeLengthDelimitedFieldSize(length); - } - - /** - * Compute the number of bytes that would be needed to encode an embedded - * message stored in lazy field. - */ - public static int computeLazyFieldSizeNoTag(final LazyFieldLite value) { - return computeLengthDelimitedFieldSize(value.getSerializedSize()); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code bytes} field. - */ - public static int computeBytesSizeNoTag(final ByteString value) { - return computeLengthDelimitedFieldSize(value.size()); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code bytes} field. - */ - public static int computeByteArraySizeNoTag(final byte[] value) { - return computeLengthDelimitedFieldSize(value.length); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code bytes} field. - */ - public static int computeByteBufferSizeNoTag(final ByteBuffer value) { - return computeLengthDelimitedFieldSize(value.capacity()); - } - - /** - * Compute the number of bytes that would be needed to encode an embedded - * message field. - */ - public static int computeMessageSizeNoTag(final MessageLite value) { - return computeLengthDelimitedFieldSize(value.getSerializedSize()); - } - - static int computeLengthDelimitedFieldSize(int fieldLength) { - return computeUInt32SizeNoTag(fieldLength) + fieldLength; - } - - /** - * Encode a ZigZag-encoded 32-bit value. ZigZag encodes signed integers - * into values that can be efficiently encoded with varint. (Otherwise, - * negative values must be sign-extended to 64 bits to be varint encoded, - * thus always taking 10 bytes on the wire.) - * - * @param n A signed 32-bit integer. - * @return An unsigned 32-bit integer, stored in a signed int because - * Java has no explicit unsigned support. - */ - public static int encodeZigZag32(final int n) { - // Note: the right-shift must be arithmetic - return (n << 1) ^ (n >> 31); - } - - /** - * Encode a ZigZag-encoded 64-bit value. ZigZag encodes signed integers - * into values that can be efficiently encoded with varint. (Otherwise, - * negative values must be sign-extended to 64 bits to be varint encoded, - * thus always taking 10 bytes on the wire.) - * - * @param n A signed 64-bit integer. - * @return An unsigned 64-bit integer, stored in a signed int because - * Java has no explicit unsigned support. - */ - public static long encodeZigZag64(final long n) { - // Note: the right-shift must be arithmetic - return (n << 1) ^ (n >> 63); - } - - // ================================================================= - - /** - * Flushes the stream and forces any buffered bytes to be written. This - * does not flush the underlying OutputStream. - */ - public abstract void flush() throws IOException; - - /** - * If writing to a flat array, return the space left in the array. - * Otherwise, throws {@code UnsupportedOperationException}. - */ - public abstract int spaceLeft(); - - /** - * Verifies that {@link #spaceLeft()} returns zero. It's common to create - * a byte array that is exactly big enough to hold a message, then write to - * it with a {@code CodedOutputStream}. Calling {@code checkNoSpaceLeft()} - * after writing verifies that the message was actually as big as expected, - * which can help catch bugs. 
- */ - public final void checkNoSpaceLeft() { - if (spaceLeft() != 0) { - throw new IllegalStateException("Did not write as much data as expected."); - } - } - - /** - * If you create a CodedOutputStream around a simple flat array, you must - * not attempt to write more bytes than the array has space. Otherwise, - * this exception will be thrown. - */ - public static class OutOfSpaceException extends IOException { - private static final long serialVersionUID = -6947486886997889499L; - - private static final String MESSAGE = - "CodedOutputStream was writing to a flat byte array and ran out of space."; - - OutOfSpaceException() { - super(MESSAGE); - } - - OutOfSpaceException(String explanationMessage) { - super(MESSAGE + ": " + explanationMessage); - } - - OutOfSpaceException(Throwable cause) { - super(MESSAGE, cause); - } - - OutOfSpaceException(String explanationMessage, Throwable cause) { - super(MESSAGE + ": " + explanationMessage, cause); - } - } - - /** - * Get the total number of bytes successfully written to this stream. The - * returned value is not guaranteed to be accurate if exceptions have been - * found in the middle of writing. - */ - public abstract int getTotalBytesWritten(); - - // ================================================================= - - /** Write a {@code bytes} field to the stream. Visible for testing. */ - abstract void writeByteArrayNoTag(final byte[] value, final int offset, final int length) - throws IOException; - - final void inefficientWriteStringNoTag(String value, UnpairedSurrogateException cause) - throws IOException { - logger.log(Level.WARNING, - "Converting ill-formed UTF-16. Your Protocol Buffer will not round trip correctly!", cause); - - // Unfortunately there does not appear to be any way to tell Java to encode - // UTF-8 directly into our buffer, so we have to let it create its own byte - // array and then copy. - // TODO(dweis): Consider using nio Charset methods instead. - final byte[] bytes = value.getBytes(Internal.UTF_8); - try { - writeUInt32NoTag(bytes.length); - writeLazy(bytes, 0, bytes.length); - } catch (IndexOutOfBoundsException e) { - throw new OutOfSpaceException(e); - } catch (OutOfSpaceException e) { - throw e; - } - } - - // ================================================================= - - /** - * Write a {@code group} field, including tag, to the stream. - * - * @deprecated groups are deprecated. - */ - @Deprecated - public final void writeGroup(final int fieldNumber, final MessageLite value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_START_GROUP); - writeGroupNoTag(value); - writeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP); - } - - /** - * Write a {@code group} field to the stream. - * - * @deprecated groups are deprecated. - */ - @Deprecated - public final void writeGroupNoTag(final MessageLite value) throws IOException { - value.writeTo(this); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code group} field, including tag. - * - * @deprecated groups are deprecated. - */ - @Deprecated - public static int computeGroupSize(final int fieldNumber, final MessageLite value) { - return computeTagSize(fieldNumber) * 2 + computeGroupSizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a - * {@code group} field. - */ - @Deprecated - public static int computeGroupSizeNoTag(final MessageLite value) { - return value.getSerializedSize(); - } - - /** - * Encode and write a varint. 
{@code value} is treated as - * unsigned, so it won't be sign-extended if negative. - * - * @deprecated use {@link #writeUInt32NoTag} instead. - */ - @Deprecated - public final void writeRawVarint32(int value) throws IOException { - writeUInt32NoTag(value); - } - - /** - * Encode and write a varint. - * - * @deprecated use {@link #writeUInt64NoTag} instead. - */ - @Deprecated - public final void writeRawVarint64(long value) throws IOException { - writeUInt64NoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a varint. - * {@code value} is treated as unsigned, so it won't be sign-extended if - * negative. - * - * @deprecated use {@link #computeUInt32SizeNoTag(int)} instead. - */ - @Deprecated - public static int computeRawVarint32Size(final int value) { - return computeUInt32SizeNoTag(value); - } - - /** - * Compute the number of bytes that would be needed to encode a varint. - * - * @deprecated use {@link #computeUInt64SizeNoTag(long)} instead. - */ - @Deprecated - public static int computeRawVarint64Size(long value) { - return computeUInt64SizeNoTag(value); - } - - /** - * Write a little-endian 32-bit integer. - * - * @deprecated Use {@link #writeFixed32NoTag} instead. - */ - @Deprecated - public final void writeRawLittleEndian32(final int value) throws IOException { - writeFixed32NoTag(value); - } - - /** - * Write a little-endian 64-bit integer. - * - * @deprecated Use {@link #writeFixed64NoTag} instead. - */ - @Deprecated - public final void writeRawLittleEndian64(final long value) throws IOException { - writeFixed64NoTag(value); - } - - // ================================================================= - - /** - * A {@link CodedOutputStream} that writes directly to a byte array. - */ - private static class ArrayEncoder extends CodedOutputStream { - private final byte[] buffer; - private final int offset; - private final int limit; - private int position; - - ArrayEncoder(byte[] buffer, int offset, int length) { - if (buffer == null) { - throw new NullPointerException("buffer"); - } - if ((offset | length | (buffer.length - (offset + length))) < 0) { - throw new IllegalArgumentException(String.format( - "Array range is invalid. 
Buffer.length=%d, offset=%d, length=%d", - buffer.length, offset, length)); - } - this.buffer = buffer; - this.offset = offset; - position = offset; - limit = offset + length; - } - - @Override - public final void writeTag(final int fieldNumber, final int wireType) throws IOException { - writeUInt32NoTag(WireFormat.makeTag(fieldNumber, wireType)); - } - - @Override - public final void writeInt32(final int fieldNumber, final int value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - writeInt32NoTag(value); - } - - @Override - public final void writeUInt32(final int fieldNumber, final int value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - writeUInt32NoTag(value); - } - - @Override - public final void writeFixed32(final int fieldNumber, final int value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_FIXED32); - writeFixed32NoTag(value); - } - - @Override - public final void writeUInt64(final int fieldNumber, final long value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - writeUInt64NoTag(value); - } - - @Override - public final void writeFixed64(final int fieldNumber, final long value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_FIXED64); - writeFixed64NoTag(value); - } - - @Override - public final void writeBool(final int fieldNumber, final boolean value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - write((byte) (value ? 1 : 0)); - } - - @Override - public final void writeString(final int fieldNumber, final String value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeStringNoTag(value); - } - - @Override - public final void writeBytes(final int fieldNumber, final ByteString value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeBytesNoTag(value); - } - - @Override - public final void writeByteArray(final int fieldNumber, final byte[] value) throws IOException { - writeByteArray(fieldNumber, value, 0, value.length); - } - - @Override - public final void writeByteArray( - final int fieldNumber, final byte[] value, final int offset, final int length) - throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeByteArrayNoTag(value, offset, length); - } - - @Override - public final void writeByteBuffer(final int fieldNumber, final ByteBuffer value) - throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeUInt32NoTag(value.capacity()); - writeRawBytes(value); - } - - @Override - public final void writeBytesNoTag(final ByteString value) throws IOException { - writeUInt32NoTag(value.size()); - value.writeTo(this); - } - - @Override - public final void writeByteArrayNoTag(final byte[] value, int offset, int length) - throws IOException { - writeUInt32NoTag(length); - write(value, offset, length); - } - - @Override - public final void writeRawBytes(final ByteBuffer value) throws IOException { - if (value.hasArray()) { - write(value.array(), value.arrayOffset(), value.capacity()); - } else { - ByteBuffer duplicated = value.duplicate(); - duplicated.clear(); - write(duplicated); - } - } - - @Override - public final void writeMessage(final int fieldNumber, final MessageLite value) - throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeMessageNoTag(value); - } - - @Override - public final void writeMessageSetExtension(final int fieldNumber, final MessageLite 
value) - throws IOException { - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_START_GROUP); - writeUInt32(WireFormat.MESSAGE_SET_TYPE_ID, fieldNumber); - writeMessage(WireFormat.MESSAGE_SET_MESSAGE, value); - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_END_GROUP); - } - - @Override - public final void writeRawMessageSetExtension(final int fieldNumber, final ByteString value) - throws IOException { - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_START_GROUP); - writeUInt32(WireFormat.MESSAGE_SET_TYPE_ID, fieldNumber); - writeBytes(WireFormat.MESSAGE_SET_MESSAGE, value); - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_END_GROUP); - } - - @Override - public final void writeMessageNoTag(final MessageLite value) throws IOException { - writeUInt32NoTag(value.getSerializedSize()); - value.writeTo(this); - } - - @Override - public final void write(byte value) throws IOException { - try { - buffer[position++] = value; - } catch (IndexOutOfBoundsException e) { - throw new OutOfSpaceException( - String.format("Pos: %d, limit: %d, len: %d", position, limit, 1), e); - } - } - - @Override - public final void writeInt32NoTag(int value) throws IOException { - if (value >= 0) { - writeUInt32NoTag(value); - } else { - // Must sign-extend. - writeUInt64NoTag(value); - } - } - - @Override - public final void writeUInt32NoTag(int value) throws IOException { - if (HAS_UNSAFE_ARRAY_OPERATIONS && spaceLeft() >= MAX_VARINT_SIZE) { - long pos = ARRAY_BASE_OFFSET + position; - while (true) { - if ((value & ~0x7F) == 0) { - UnsafeUtil.putByte(buffer, pos++, (byte) value); - position++; - return; - } else { - UnsafeUtil.putByte(buffer, pos++, (byte) ((value & 0x7F) | 0x80)); - position++; - value >>>= 7; - } - } - } else { - try { - while (true) { - if ((value & ~0x7F) == 0) { - buffer[position++] = (byte) value; - return; - } else { - buffer[position++] = (byte) ((value & 0x7F) | 0x80); - value >>>= 7; - } - } - } catch (IndexOutOfBoundsException e) { - throw new OutOfSpaceException( - String.format("Pos: %d, limit: %d, len: %d", position, limit, 1), e); - } - } - } - - @Override - public final void writeFixed32NoTag(int value) throws IOException { - try { - buffer[position++] = (byte) (value & 0xFF); - buffer[position++] = (byte) ((value >> 8) & 0xFF); - buffer[position++] = (byte) ((value >> 16) & 0xFF); - buffer[position++] = (byte) ((value >> 24) & 0xFF); - } catch (IndexOutOfBoundsException e) { - throw new OutOfSpaceException( - String.format("Pos: %d, limit: %d, len: %d", position, limit, 1), e); - } - } - - @Override - public final void writeUInt64NoTag(long value) throws IOException { - if (HAS_UNSAFE_ARRAY_OPERATIONS && spaceLeft() >= MAX_VARINT_SIZE) { - long pos = ARRAY_BASE_OFFSET + position; - while (true) { - if ((value & ~0x7FL) == 0) { - UnsafeUtil.putByte(buffer, pos++, (byte) value); - position++; - return; - } else { - UnsafeUtil.putByte(buffer, pos++, (byte) (((int) value & 0x7F) | 0x80)); - position++; - value >>>= 7; - } - } - } else { - try { - while (true) { - if ((value & ~0x7FL) == 0) { - buffer[position++] = (byte) value; - return; - } else { - buffer[position++] = (byte) (((int) value & 0x7F) | 0x80); - value >>>= 7; - } - } - } catch (IndexOutOfBoundsException e) { - throw new OutOfSpaceException( - String.format("Pos: %d, limit: %d, len: %d", position, limit, 1), e); - } - } - } - - @Override - public final void writeFixed64NoTag(long value) throws IOException { - try { - buffer[position++] = (byte) ((int) (value) & 0xFF); - 
buffer[position++] = (byte) ((int) (value >> 8) & 0xFF); - buffer[position++] = (byte) ((int) (value >> 16) & 0xFF); - buffer[position++] = (byte) ((int) (value >> 24) & 0xFF); - buffer[position++] = (byte) ((int) (value >> 32) & 0xFF); - buffer[position++] = (byte) ((int) (value >> 40) & 0xFF); - buffer[position++] = (byte) ((int) (value >> 48) & 0xFF); - buffer[position++] = (byte) ((int) (value >> 56) & 0xFF); - } catch (IndexOutOfBoundsException e) { - throw new OutOfSpaceException( - String.format("Pos: %d, limit: %d, len: %d", position, limit, 1), e); - } - } - - @Override - public final void write(byte[] value, int offset, int length) throws IOException { - try { - System.arraycopy(value, offset, buffer, position, length); - position += length; - } catch (IndexOutOfBoundsException e) { - throw new OutOfSpaceException( - String.format("Pos: %d, limit: %d, len: %d", position, limit, length), e); - } - } - - @Override - public final void writeLazy(byte[] value, int offset, int length) throws IOException { - write(value, offset, length); - } - - @Override - public final void write(ByteBuffer value) throws IOException { - final int length = value.remaining(); - try { - value.get(buffer, position, length); - position += length; - } catch (IndexOutOfBoundsException e) { - throw new OutOfSpaceException( - String.format("Pos: %d, limit: %d, len: %d", position, limit, length), e); - } - } - - @Override - public final void writeLazy(ByteBuffer value) throws IOException { - write(value); - } - - @Override - public final void writeStringNoTag(String value) throws IOException { - final int oldPosition = position; - try { - // UTF-8 byte length of the string is at least its UTF-16 code unit length (value.length()), - // and at most 3 times of it. We take advantage of this in both branches below. - final int maxLength = value.length() * Utf8.MAX_BYTES_PER_CHAR; - final int maxLengthVarIntSize = computeUInt32SizeNoTag(maxLength); - final int minLengthVarIntSize = computeUInt32SizeNoTag(value.length()); - if (minLengthVarIntSize == maxLengthVarIntSize) { - position = oldPosition + minLengthVarIntSize; - int newPosition = Utf8.encode(value, buffer, position, spaceLeft()); - // Since this class is stateful and tracks the position, we rewind and store the state, - // prepend the length, then reset it back to the end of the string. - position = oldPosition; - int length = newPosition - oldPosition - minLengthVarIntSize; - writeUInt32NoTag(length); - position = newPosition; - } else { - int length = Utf8.encodedLength(value); - writeUInt32NoTag(length); - position = Utf8.encode(value, buffer, position, spaceLeft()); - } - } catch (UnpairedSurrogateException e) { - // Roll back the change - we fall back to inefficient path. - position = oldPosition; - - // TODO(nathanmittler): We should throw an IOException here instead. - inefficientWriteStringNoTag(value, e); - } catch (IndexOutOfBoundsException e) { - throw new OutOfSpaceException(e); - } - } - - @Override - public void flush() { - // Do nothing. - } - - @Override - public final int spaceLeft() { - return limit - position; - } - - @Override - public final int getTotalBytesWritten() { - return position - offset; - } - } - - /** - * A {@link CodedOutputStream} that writes directly to a heap {@link ByteBuffer}. Writes are - * done directly to the underlying array. The buffer position is only updated after a flush. 
- */ - private static final class HeapNioEncoder extends ArrayEncoder { - private final ByteBuffer byteBuffer; - private int initialPosition; - - HeapNioEncoder(ByteBuffer byteBuffer) { - super(byteBuffer.array(), byteBuffer.arrayOffset() + byteBuffer.position(), - byteBuffer.remaining()); - this.byteBuffer = byteBuffer; - this.initialPosition = byteBuffer.position(); - } - - @Override - public void flush() { - // Update the position on the buffer. - byteBuffer.position(initialPosition + getTotalBytesWritten()); - } - } - - /** - * A {@link CodedOutputStream} that writes directly to a direct {@link ByteBuffer}, using only - * safe operations.. - */ - private static final class SafeDirectNioEncoder extends CodedOutputStream { - private final ByteBuffer originalBuffer; - private final ByteBuffer buffer; - private final int initialPosition; - - SafeDirectNioEncoder(ByteBuffer buffer) { - this.originalBuffer = buffer; - this.buffer = buffer.duplicate().order(ByteOrder.LITTLE_ENDIAN); - initialPosition = buffer.position(); - } - - @Override - public void writeTag(final int fieldNumber, final int wireType) throws IOException { - writeUInt32NoTag(WireFormat.makeTag(fieldNumber, wireType)); - } - - @Override - public void writeInt32(final int fieldNumber, final int value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - writeInt32NoTag(value); - } - - @Override - public void writeUInt32(final int fieldNumber, final int value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - writeUInt32NoTag(value); - } - - @Override - public void writeFixed32(final int fieldNumber, final int value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_FIXED32); - writeFixed32NoTag(value); - } - - @Override - public void writeUInt64(final int fieldNumber, final long value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - writeUInt64NoTag(value); - } - - @Override - public void writeFixed64(final int fieldNumber, final long value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_FIXED64); - writeFixed64NoTag(value); - } - - @Override - public void writeBool(final int fieldNumber, final boolean value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - write((byte) (value ? 
1 : 0)); - } - - @Override - public void writeString(final int fieldNumber, final String value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeStringNoTag(value); - } - - @Override - public void writeBytes(final int fieldNumber, final ByteString value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeBytesNoTag(value); - } - - @Override - public void writeByteArray(final int fieldNumber, final byte[] value) throws IOException { - writeByteArray(fieldNumber, value, 0, value.length); - } - - @Override - public void writeByteArray( - final int fieldNumber, final byte[] value, final int offset, final int length) - throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeByteArrayNoTag(value, offset, length); - } - - @Override - public void writeByteBuffer(final int fieldNumber, final ByteBuffer value) - throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeUInt32NoTag(value.capacity()); - writeRawBytes(value); - } - - @Override - public void writeMessage(final int fieldNumber, final MessageLite value) - throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeMessageNoTag(value); - } - - @Override - public void writeMessageSetExtension(final int fieldNumber, final MessageLite value) - throws IOException { - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_START_GROUP); - writeUInt32(WireFormat.MESSAGE_SET_TYPE_ID, fieldNumber); - writeMessage(WireFormat.MESSAGE_SET_MESSAGE, value); - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_END_GROUP); - } - - @Override - public void writeRawMessageSetExtension(final int fieldNumber, final ByteString value) - throws IOException { - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_START_GROUP); - writeUInt32(WireFormat.MESSAGE_SET_TYPE_ID, fieldNumber); - writeBytes(WireFormat.MESSAGE_SET_MESSAGE, value); - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_END_GROUP); - } - - @Override - public void writeMessageNoTag(final MessageLite value) throws IOException { - writeUInt32NoTag(value.getSerializedSize()); - value.writeTo(this); - } - - @Override - public void write(byte value) throws IOException { - try { - buffer.put(value); - } catch (BufferOverflowException e) { - throw new OutOfSpaceException(e); - } - } - - @Override - public void writeBytesNoTag(final ByteString value) throws IOException { - writeUInt32NoTag(value.size()); - value.writeTo(this); - } - - @Override - public void writeByteArrayNoTag(final byte[] value, int offset, int length) throws IOException { - writeUInt32NoTag(length); - write(value, offset, length); - } - - @Override - public void writeRawBytes(final ByteBuffer value) throws IOException { - if (value.hasArray()) { - write(value.array(), value.arrayOffset(), value.capacity()); - } else { - ByteBuffer duplicated = value.duplicate(); - duplicated.clear(); - write(duplicated); - } - } - - @Override - public void writeInt32NoTag(int value) throws IOException { - if (value >= 0) { - writeUInt32NoTag(value); - } else { - // Must sign-extend. 
- writeUInt64NoTag(value); - } - } - - @Override - public void writeUInt32NoTag(int value) throws IOException { - try { - while (true) { - if ((value & ~0x7F) == 0) { - buffer.put((byte) value); - return; - } else { - buffer.put((byte) ((value & 0x7F) | 0x80)); - value >>>= 7; - } - } - } catch (BufferOverflowException e) { - throw new OutOfSpaceException(e); - } - } - - @Override - public void writeFixed32NoTag(int value) throws IOException { - try { - buffer.putInt(value); - } catch (BufferOverflowException e) { - throw new OutOfSpaceException(e); - } - } - - @Override - public void writeUInt64NoTag(long value) throws IOException { - try { - while (true) { - if ((value & ~0x7FL) == 0) { - buffer.put((byte) value); - return; - } else { - buffer.put((byte) (((int) value & 0x7F) | 0x80)); - value >>>= 7; - } - } - } catch (BufferOverflowException e) { - throw new OutOfSpaceException(e); - } - } - - @Override - public void writeFixed64NoTag(long value) throws IOException { - try { - buffer.putLong(value); - } catch (BufferOverflowException e) { - throw new OutOfSpaceException(e); - } - } - - @Override - public void write(byte[] value, int offset, int length) throws IOException { - try { - buffer.put(value, offset, length); - } catch (IndexOutOfBoundsException e) { - throw new OutOfSpaceException(e); - } catch (BufferOverflowException e) { - throw new OutOfSpaceException(e); - } - } - - @Override - public void writeLazy(byte[] value, int offset, int length) throws IOException { - write(value, offset, length); - } - - @Override - public void write(ByteBuffer value) throws IOException { - try { - buffer.put(value); - } catch (BufferOverflowException e) { - throw new OutOfSpaceException(e); - } - } - - @Override - public void writeLazy(ByteBuffer value) throws IOException { - write(value); - } - - @Override - public void writeStringNoTag(String value) throws IOException { - final int startPos = buffer.position(); - try { - // UTF-8 byte length of the string is at least its UTF-16 code unit length (value.length()), - // and at most 3 times of it. We take advantage of this in both branches below. - final int maxEncodedSize = value.length() * Utf8.MAX_BYTES_PER_CHAR; - final int maxLengthVarIntSize = computeUInt32SizeNoTag(maxEncodedSize); - final int minLengthVarIntSize = computeUInt32SizeNoTag(value.length()); - if (minLengthVarIntSize == maxLengthVarIntSize) { - // Save the current position and increment past the length field. We'll come back - // and write the length field after the encoding is complete. - final int startOfBytes = buffer.position() + minLengthVarIntSize; - buffer.position(startOfBytes); - - // Encode the string. - encode(value); - - // Now go back to the beginning and write the length. - int endOfBytes = buffer.position(); - buffer.position(startPos); - writeUInt32NoTag(endOfBytes - startOfBytes); - - // Reposition the buffer past the written data. - buffer.position(endOfBytes); - } else { - final int length = Utf8.encodedLength(value); - writeUInt32NoTag(length); - encode(value); - } - } catch (UnpairedSurrogateException e) { - // Roll back the change and convert to an IOException. - buffer.position(startPos); - - // TODO(nathanmittler): We should throw an IOException here instead. - inefficientWriteStringNoTag(value, e); - } catch (IllegalArgumentException e) { - // Thrown by buffer.position() if out of range. - throw new OutOfSpaceException(e); - } - } - - @Override - public void flush() { - // Update the position of the original buffer. 
- originalBuffer.position(buffer.position()); - } - - @Override - public int spaceLeft() { - return buffer.remaining(); - } - - @Override - public int getTotalBytesWritten() { - return buffer.position() - initialPosition; - } - - private void encode(String value) throws IOException { - try { - Utf8.encodeUtf8(value, buffer); - } catch (IndexOutOfBoundsException e) { - throw new OutOfSpaceException(e); - } - } - } - - /** - * A {@link CodedOutputStream} that writes directly to a direct {@link ByteBuffer} using {@code - * sun.misc.Unsafe}. - */ - private static final class UnsafeDirectNioEncoder extends CodedOutputStream { - private final ByteBuffer originalBuffer; - private final ByteBuffer buffer; - private final long address; - private final long initialPosition; - private final long limit; - private final long oneVarintLimit; - private long position; - - UnsafeDirectNioEncoder(ByteBuffer buffer) { - this.originalBuffer = buffer; - this.buffer = buffer.duplicate().order(ByteOrder.LITTLE_ENDIAN); - address = UnsafeUtil.addressOffset(buffer); - initialPosition = address + buffer.position(); - limit = address + buffer.limit(); - oneVarintLimit = limit - MAX_VARINT_SIZE; - position = initialPosition; - } - - static boolean isSupported() { - return UnsafeUtil.hasUnsafeByteBufferOperations(); - } - - @Override - public void writeTag(int fieldNumber, int wireType) throws IOException { - writeUInt32NoTag(WireFormat.makeTag(fieldNumber, wireType)); - } - - @Override - public void writeInt32(int fieldNumber, int value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - writeInt32NoTag(value); - } - - @Override - public void writeUInt32(int fieldNumber, int value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - writeUInt32NoTag(value); - } - - @Override - public void writeFixed32(int fieldNumber, int value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_FIXED32); - writeFixed32NoTag(value); - } - - @Override - public void writeUInt64(int fieldNumber, long value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - writeUInt64NoTag(value); - } - - @Override - public void writeFixed64(int fieldNumber, long value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_FIXED64); - writeFixed64NoTag(value); - } - - @Override - public void writeBool(int fieldNumber, boolean value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - write((byte) (value ? 
1 : 0)); - } - - @Override - public void writeString(int fieldNumber, String value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeStringNoTag(value); - } - - @Override - public void writeBytes(int fieldNumber, ByteString value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeBytesNoTag(value); - } - - @Override - public void writeByteArray(int fieldNumber, byte[] value) throws IOException { - writeByteArray(fieldNumber, value, 0, value.length); - } - - @Override - public void writeByteArray(int fieldNumber, byte[] value, int offset, int length) - throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeByteArrayNoTag(value, offset, length); - } - - @Override - public void writeByteBuffer(int fieldNumber, ByteBuffer value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeUInt32NoTag(value.capacity()); - writeRawBytes(value); - } - - @Override - public void writeMessage(int fieldNumber, MessageLite value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeMessageNoTag(value); - } - - @Override - public void writeMessageSetExtension(int fieldNumber, MessageLite value) throws IOException { - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_START_GROUP); - writeUInt32(WireFormat.MESSAGE_SET_TYPE_ID, fieldNumber); - writeMessage(WireFormat.MESSAGE_SET_MESSAGE, value); - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_END_GROUP); - } - - @Override - public void writeRawMessageSetExtension(int fieldNumber, ByteString value) throws IOException { - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_START_GROUP); - writeUInt32(WireFormat.MESSAGE_SET_TYPE_ID, fieldNumber); - writeBytes(WireFormat.MESSAGE_SET_MESSAGE, value); - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_END_GROUP); - } - - @Override - public void writeMessageNoTag(MessageLite value) throws IOException { - writeUInt32NoTag(value.getSerializedSize()); - value.writeTo(this); - } - - @Override - public void write(byte value) throws IOException { - if (position >= limit) { - throw new OutOfSpaceException( - String.format("Pos: %d, limit: %d, len: %d", position, limit, 1)); - } - UnsafeUtil.putByte(position++, value); - } - - @Override - public void writeBytesNoTag(ByteString value) throws IOException { - writeUInt32NoTag(value.size()); - value.writeTo(this); - } - - @Override - public void writeByteArrayNoTag(byte[] value, int offset, int length) throws IOException { - writeUInt32NoTag(length); - write(value, offset, length); - } - - @Override - public void writeRawBytes(ByteBuffer value) throws IOException { - if (value.hasArray()) { - write(value.array(), value.arrayOffset(), value.capacity()); - } else { - ByteBuffer duplicated = value.duplicate(); - duplicated.clear(); - write(duplicated); - } - } - - @Override - public void writeInt32NoTag(int value) throws IOException { - if (value >= 0) { - writeUInt32NoTag(value); - } else { - // Must sign-extend. - writeUInt64NoTag(value); - } - } - - @Override - public void writeUInt32NoTag(int value) throws IOException { - if (position <= oneVarintLimit) { - // Optimization to avoid bounds checks on each iteration. 
- while (true) { - if ((value & ~0x7F) == 0) { - UnsafeUtil.putByte(position++, (byte) value); - return; - } else { - UnsafeUtil.putByte(position++, (byte) ((value & 0x7F) | 0x80)); - value >>>= 7; - } - } - } else { - while (position < limit) { - if ((value & ~0x7F) == 0) { - UnsafeUtil.putByte(position++, (byte) value); - return; - } else { - UnsafeUtil.putByte(position++, (byte) ((value & 0x7F) | 0x80)); - value >>>= 7; - } - } - throw new OutOfSpaceException( - String.format("Pos: %d, limit: %d, len: %d", position, limit, 1)); - } - } - - @Override - public void writeFixed32NoTag(int value) throws IOException { - buffer.putInt(bufferPos(position), value); - position += FIXED_32_SIZE; - } - - @Override - public void writeUInt64NoTag(long value) throws IOException { - if (position <= oneVarintLimit) { - // Optimization to avoid bounds checks on each iteration. - while (true) { - if ((value & ~0x7FL) == 0) { - UnsafeUtil.putByte(position++, (byte) value); - return; - } else { - UnsafeUtil.putByte(position++, (byte) (((int) value & 0x7F) | 0x80)); - value >>>= 7; - } - } - } else { - while (position < limit) { - if ((value & ~0x7FL) == 0) { - UnsafeUtil.putByte(position++, (byte) value); - return; - } else { - UnsafeUtil.putByte(position++, (byte) (((int) value & 0x7F) | 0x80)); - value >>>= 7; - } - } - throw new OutOfSpaceException( - String.format("Pos: %d, limit: %d, len: %d", position, limit, 1)); - } - } - - @Override - public void writeFixed64NoTag(long value) throws IOException { - buffer.putLong(bufferPos(position), value); - position += FIXED_64_SIZE; - } - - @Override - public void write(byte[] value, int offset, int length) throws IOException { - if (value == null - || offset < 0 - || length < 0 - || (value.length - length) < offset - || (limit - length) < position) { - if (value == null) { - throw new NullPointerException("value"); - } - throw new OutOfSpaceException( - String.format("Pos: %d, limit: %d, len: %d", position, limit, length)); - } - - UnsafeUtil.copyMemory( - value, UnsafeUtil.getArrayBaseOffset() + offset, null, position, length); - position += length; - } - - @Override - public void writeLazy(byte[] value, int offset, int length) throws IOException { - write(value, offset, length); - } - - @Override - public void write(ByteBuffer value) throws IOException { - try { - int length = value.remaining(); - repositionBuffer(position); - buffer.put(value); - position += length; - } catch (BufferOverflowException e) { - throw new OutOfSpaceException(e); - } - } - - @Override - public void writeLazy(ByteBuffer value) throws IOException { - write(value); - } - - @Override - public void writeStringNoTag(String value) throws IOException { - long prevPos = position; - try { - // UTF-8 byte length of the string is at least its UTF-16 code unit length (value.length()), - // and at most 3 times of it. We take advantage of this in both branches below. - int maxEncodedSize = value.length() * Utf8.MAX_BYTES_PER_CHAR; - int maxLengthVarIntSize = computeUInt32SizeNoTag(maxEncodedSize); - int minLengthVarIntSize = computeUInt32SizeNoTag(value.length()); - if (minLengthVarIntSize == maxLengthVarIntSize) { - // Save the current position and increment past the length field. We'll come back - // and write the length field after the encoding is complete. - int stringStart = bufferPos(position) + minLengthVarIntSize; - buffer.position(stringStart); - - // Encode the string. - Utf8.encodeUtf8(value, buffer); - - // Write the length and advance the position. 
- int length = buffer.position() - stringStart; - writeUInt32NoTag(length); - position += length; - } else { - // Calculate and write the encoded length. - int length = Utf8.encodedLength(value); - writeUInt32NoTag(length); - - // Write the string and advance the position. - repositionBuffer(position); - Utf8.encodeUtf8(value, buffer); - position += length; - } - } catch (UnpairedSurrogateException e) { - // Roll back the change and convert to an IOException. - position = prevPos; - repositionBuffer(position); - - // TODO(nathanmittler): We should throw an IOException here instead. - inefficientWriteStringNoTag(value, e); - } catch (IllegalArgumentException e) { - // Thrown by buffer.position() if out of range. - throw new OutOfSpaceException(e); - } catch (IndexOutOfBoundsException e) { - throw new OutOfSpaceException(e); - } - } - - @Override - public void flush() { - // Update the position of the original buffer. - originalBuffer.position(bufferPos(position)); - } - - @Override - public int spaceLeft() { - return (int) (limit - position); - } - - @Override - public int getTotalBytesWritten() { - return (int) (position - initialPosition); - } - - private void repositionBuffer(long pos) { - buffer.position(bufferPos(pos)); - } - - private int bufferPos(long pos) { - return (int) (pos - address); - } - } - - /** - * Abstract base class for buffered encoders. - */ - private abstract static class AbstractBufferedEncoder extends CodedOutputStream { - final byte[] buffer; - final int limit; - int position; - int totalBytesWritten; - - AbstractBufferedEncoder(int bufferSize) { - if (bufferSize < 0) { - throw new IllegalArgumentException("bufferSize must be >= 0"); - } - // As an optimization, we require that the buffer be able to store at least 2 - // varints so that we can buffer any integer write (tag + value). This reduces the - // number of range checks for a single write to 1 (i.e. if there is not enough space - // to buffer the tag+value, flush and then buffer it). - this.buffer = new byte[max(bufferSize, MAX_VARINT_SIZE * 2)]; - this.limit = buffer.length; - } - - @Override - public final int spaceLeft() { - throw new UnsupportedOperationException( - "spaceLeft() can only be called on CodedOutputStreams that are " - + "writing to a flat array or ByteBuffer."); - } - - @Override - public final int getTotalBytesWritten() { - return totalBytesWritten; - } - - /** - * This method does not perform bounds checking on the array. Checking array bounds is the - * responsibility of the caller. - */ - final void buffer(byte value) { - buffer[position++] = value; - totalBytesWritten++; - } - - /** - * This method does not perform bounds checking on the array. Checking array bounds is the - * responsibility of the caller. - */ - final void bufferTag(final int fieldNumber, final int wireType) { - bufferUInt32NoTag(WireFormat.makeTag(fieldNumber, wireType)); - } - - /** - * This method does not perform bounds checking on the array. Checking array bounds is the - * responsibility of the caller. - */ - final void bufferInt32NoTag(final int value) { - if (value >= 0) { - bufferUInt32NoTag(value); - } else { - // Must sign-extend. - bufferUInt64NoTag(value); - } - } - - /** - * This method does not perform bounds checking on the array. Checking array bounds is the - * responsibility of the caller. 
- */ - final void bufferUInt32NoTag(int value) { - if (HAS_UNSAFE_ARRAY_OPERATIONS) { - final long originalPos = ARRAY_BASE_OFFSET + position; - long pos = originalPos; - while (true) { - if ((value & ~0x7F) == 0) { - UnsafeUtil.putByte(buffer, pos++, (byte) value); - break; - } else { - UnsafeUtil.putByte(buffer, pos++, (byte) ((value & 0x7F) | 0x80)); - value >>>= 7; - } - } - int delta = (int) (pos - originalPos); - position += delta; - totalBytesWritten += delta; - } else { - while (true) { - if ((value & ~0x7F) == 0) { - buffer[position++] = (byte) value; - totalBytesWritten++; - return; - } else { - buffer[position++] = (byte) ((value & 0x7F) | 0x80); - totalBytesWritten++; - value >>>= 7; - } - } - } - } - - /** - * This method does not perform bounds checking on the array. Checking array bounds is the - * responsibility of the caller. - */ - final void bufferUInt64NoTag(long value) { - if (HAS_UNSAFE_ARRAY_OPERATIONS) { - final long originalPos = ARRAY_BASE_OFFSET + position; - long pos = originalPos; - while (true) { - if ((value & ~0x7FL) == 0) { - UnsafeUtil.putByte(buffer, pos++, (byte) value); - break; - } else { - UnsafeUtil.putByte(buffer, pos++, (byte) (((int) value & 0x7F) | 0x80)); - value >>>= 7; - } - } - int delta = (int) (pos - originalPos); - position += delta; - totalBytesWritten += delta; - } else { - while (true) { - if ((value & ~0x7FL) == 0) { - buffer[position++] = (byte) value; - totalBytesWritten++; - return; - } else { - buffer[position++] = (byte) (((int) value & 0x7F) | 0x80); - totalBytesWritten++; - value >>>= 7; - } - } - } - } - - /** - * This method does not perform bounds checking on the array. Checking array bounds is the - * responsibility of the caller. - */ - final void bufferFixed32NoTag(int value) { - buffer[position++] = (byte) (value & 0xFF); - buffer[position++] = (byte) ((value >> 8) & 0xFF); - buffer[position++] = (byte) ((value >> 16) & 0xFF); - buffer[position++] = (byte) ((value >> 24) & 0xFF); - totalBytesWritten += FIXED_32_SIZE; - } - - /** - * This method does not perform bounds checking on the array. Checking array bounds is the - * responsibility of the caller. - */ - final void bufferFixed64NoTag(long value) { - buffer[position++] = (byte) (value & 0xFF); - buffer[position++] = (byte) ((value >> 8) & 0xFF); - buffer[position++] = (byte) ((value >> 16) & 0xFF); - buffer[position++] = (byte) ((value >> 24) & 0xFF); - buffer[position++] = (byte) ((int) (value >> 32) & 0xFF); - buffer[position++] = (byte) ((int) (value >> 40) & 0xFF); - buffer[position++] = (byte) ((int) (value >> 48) & 0xFF); - buffer[position++] = (byte) ((int) (value >> 56) & 0xFF); - totalBytesWritten += FIXED_64_SIZE; - } - } - - /** - * A {@link CodedOutputStream} that decorates a {@link ByteOutput}. It internal buffer only to - * support string encoding operations. All other writes are just passed through to the - * {@link ByteOutput}. 
- */ - private static final class ByteOutputEncoder extends AbstractBufferedEncoder { - private final ByteOutput out; - - ByteOutputEncoder(ByteOutput out, int bufferSize) { - super(bufferSize); - if (out == null) { - throw new NullPointerException("out"); - } - this.out = out; - } - - @Override - public void writeTag(final int fieldNumber, final int wireType) throws IOException { - writeUInt32NoTag(WireFormat.makeTag(fieldNumber, wireType)); - } - - @Override - public void writeInt32(final int fieldNumber, final int value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE * 2); - bufferTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - bufferInt32NoTag(value); - } - - @Override - public void writeUInt32(final int fieldNumber, final int value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE * 2); - bufferTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - bufferUInt32NoTag(value); - } - - @Override - public void writeFixed32(final int fieldNumber, final int value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE + FIXED_32_SIZE); - bufferTag(fieldNumber, WireFormat.WIRETYPE_FIXED32); - bufferFixed32NoTag(value); - } - - @Override - public void writeUInt64(final int fieldNumber, final long value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE * 2); - bufferTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - bufferUInt64NoTag(value); - } - - @Override - public void writeFixed64(final int fieldNumber, final long value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE + FIXED_64_SIZE); - bufferTag(fieldNumber, WireFormat.WIRETYPE_FIXED64); - bufferFixed64NoTag(value); - } - - @Override - public void writeBool(final int fieldNumber, final boolean value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE + 1); - bufferTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - buffer((byte) (value ? 
1 : 0)); - } - - @Override - public void writeString(final int fieldNumber, final String value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeStringNoTag(value); - } - - @Override - public void writeBytes(final int fieldNumber, final ByteString value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeBytesNoTag(value); - } - - @Override - public void writeByteArray(final int fieldNumber, final byte[] value) throws IOException { - writeByteArray(fieldNumber, value, 0, value.length); - } - - @Override - public void writeByteArray( - final int fieldNumber, final byte[] value, final int offset, final int length) - throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeByteArrayNoTag(value, offset, length); - } - - @Override - public void writeByteBuffer(final int fieldNumber, final ByteBuffer value) - throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeUInt32NoTag(value.capacity()); - writeRawBytes(value); - } - - @Override - public void writeBytesNoTag(final ByteString value) throws IOException { - writeUInt32NoTag(value.size()); - value.writeTo(this); - } - - @Override - public void writeByteArrayNoTag(final byte[] value, int offset, int length) throws IOException { - writeUInt32NoTag(length); - write(value, offset, length); - } - - @Override - public void writeRawBytes(final ByteBuffer value) throws IOException { - if (value.hasArray()) { - write(value.array(), value.arrayOffset(), value.capacity()); - } else { - ByteBuffer duplicated = value.duplicate(); - duplicated.clear(); - write(duplicated); - } - } - - @Override - public void writeMessage(final int fieldNumber, final MessageLite value) - throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeMessageNoTag(value); - } - - @Override - public void writeMessageSetExtension(final int fieldNumber, final MessageLite value) - throws IOException { - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_START_GROUP); - writeUInt32(WireFormat.MESSAGE_SET_TYPE_ID, fieldNumber); - writeMessage(WireFormat.MESSAGE_SET_MESSAGE, value); - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_END_GROUP); - } - - @Override - public void writeRawMessageSetExtension(final int fieldNumber, final ByteString value) - throws IOException { - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_START_GROUP); - writeUInt32(WireFormat.MESSAGE_SET_TYPE_ID, fieldNumber); - writeBytes(WireFormat.MESSAGE_SET_MESSAGE, value); - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_END_GROUP); - } - - @Override - public void writeMessageNoTag(final MessageLite value) throws IOException { - writeUInt32NoTag(value.getSerializedSize()); - value.writeTo(this); - } - - @Override - public void write(byte value) throws IOException { - if (position == limit) { - doFlush(); - } - - buffer(value); - } - - @Override - public void writeInt32NoTag(int value) throws IOException { - if (value >= 0) { - writeUInt32NoTag(value); - } else { - // Must sign-extend. 
- writeUInt64NoTag(value); - } - } - - @Override - public void writeUInt32NoTag(int value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE); - bufferUInt32NoTag(value); - } - - @Override - public void writeFixed32NoTag(final int value) throws IOException { - flushIfNotAvailable(FIXED_32_SIZE); - bufferFixed32NoTag(value); - } - - @Override - public void writeUInt64NoTag(long value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE); - bufferUInt64NoTag(value); - } - - @Override - public void writeFixed64NoTag(final long value) throws IOException { - flushIfNotAvailable(FIXED_64_SIZE); - bufferFixed64NoTag(value); - } - - @Override - public void writeStringNoTag(String value) throws IOException { - // UTF-8 byte length of the string is at least its UTF-16 code unit length (value.length()), - // and at most 3 times of it. We take advantage of this in both branches below. - final int maxLength = value.length() * Utf8.MAX_BYTES_PER_CHAR; - final int maxLengthVarIntSize = computeUInt32SizeNoTag(maxLength); - - // If we are streaming and the potential length is too big to fit in our buffer, we take the - // slower path. - if (maxLengthVarIntSize + maxLength > limit) { - // Allocate a byte[] that we know can fit the string and encode into it. String.getBytes() - // does the same internally and then does *another copy* to return a byte[] of exactly the - // right size. We can skip that copy and just writeRawBytes up to the actualLength of the - // UTF-8 encoded bytes. - final byte[] encodedBytes = new byte[maxLength]; - int actualLength = Utf8.encode(value, encodedBytes, 0, maxLength); - writeUInt32NoTag(actualLength); - writeLazy(encodedBytes, 0, actualLength); - return; - } - - // Fast path: we have enough space available in our buffer for the string... - if (maxLengthVarIntSize + maxLength > limit - position) { - // Flush to free up space. - doFlush(); - } - - final int oldPosition = position; - try { - // Optimize for the case where we know this length results in a constant varint length as - // this saves a pass for measuring the length of the string. - final int minLengthVarIntSize = computeUInt32SizeNoTag(value.length()); - - if (minLengthVarIntSize == maxLengthVarIntSize) { - position = oldPosition + minLengthVarIntSize; - int newPosition = Utf8.encode(value, buffer, position, limit - position); - // Since this class is stateful and tracks the position, we rewind and store the state, - // prepend the length, then reset it back to the end of the string. - position = oldPosition; - int length = newPosition - oldPosition - minLengthVarIntSize; - bufferUInt32NoTag(length); - position = newPosition; - totalBytesWritten += length; - } else { - int length = Utf8.encodedLength(value); - bufferUInt32NoTag(length); - position = Utf8.encode(value, buffer, position, length); - totalBytesWritten += length; - } - } catch (UnpairedSurrogateException e) { - // Roll back the change and convert to an IOException. - totalBytesWritten -= position - oldPosition; - position = oldPosition; - - // TODO(nathanmittler): We should throw an IOException here instead. - inefficientWriteStringNoTag(value, e); - } catch (IndexOutOfBoundsException e) { - throw new OutOfSpaceException(e); - } - } - - @Override - public void flush() throws IOException { - if (position > 0) { - // Flush the buffer. 
- doFlush(); - } - } - - @Override - public void write(byte[] value, int offset, int length) throws IOException { - flush(); - out.write(value, offset, length); - totalBytesWritten += length; - } - - @Override - public void writeLazy(byte[] value, int offset, int length) throws IOException { - flush(); - out.writeLazy(value, offset, length); - totalBytesWritten += length; - } - - @Override - public void write(ByteBuffer value) throws IOException { - flush(); - int length = value.remaining(); - out.write(value); - totalBytesWritten += length; - } - - @Override - public void writeLazy(ByteBuffer value) throws IOException { - flush(); - int length = value.remaining(); - out.writeLazy(value); - totalBytesWritten += length; - } - - private void flushIfNotAvailable(int requiredSize) throws IOException { - if (limit - position < requiredSize) { - doFlush(); - } - } - - private void doFlush() throws IOException { - out.write(buffer, 0, position); - position = 0; - } - } - - /** - * An {@link CodedOutputStream} that decorates an {@link OutputStream}. It performs internal - * buffering to optimize writes to the {@link OutputStream}. - */ - private static final class OutputStreamEncoder extends AbstractBufferedEncoder { - private final OutputStream out; - - OutputStreamEncoder(OutputStream out, int bufferSize) { - super(bufferSize); - if (out == null) { - throw new NullPointerException("out"); - } - this.out = out; - } - - @Override - public void writeTag(final int fieldNumber, final int wireType) throws IOException { - writeUInt32NoTag(WireFormat.makeTag(fieldNumber, wireType)); - } - - @Override - public void writeInt32(final int fieldNumber, final int value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE * 2); - bufferTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - bufferInt32NoTag(value); - } - - @Override - public void writeUInt32(final int fieldNumber, final int value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE * 2); - bufferTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - bufferUInt32NoTag(value); - } - - @Override - public void writeFixed32(final int fieldNumber, final int value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE + FIXED_32_SIZE); - bufferTag(fieldNumber, WireFormat.WIRETYPE_FIXED32); - bufferFixed32NoTag(value); - } - - @Override - public void writeUInt64(final int fieldNumber, final long value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE * 2); - bufferTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - bufferUInt64NoTag(value); - } - - @Override - public void writeFixed64(final int fieldNumber, final long value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE + FIXED_64_SIZE); - bufferTag(fieldNumber, WireFormat.WIRETYPE_FIXED64); - bufferFixed64NoTag(value); - } - - @Override - public void writeBool(final int fieldNumber, final boolean value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE + 1); - bufferTag(fieldNumber, WireFormat.WIRETYPE_VARINT); - buffer((byte) (value ? 
1 : 0)); - } - - @Override - public void writeString(final int fieldNumber, final String value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeStringNoTag(value); - } - - @Override - public void writeBytes(final int fieldNumber, final ByteString value) throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeBytesNoTag(value); - } - - @Override - public void writeByteArray(final int fieldNumber, final byte[] value) throws IOException { - writeByteArray(fieldNumber, value, 0, value.length); - } - - @Override - public void writeByteArray( - final int fieldNumber, final byte[] value, final int offset, final int length) - throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeByteArrayNoTag(value, offset, length); - } - - @Override - public void writeByteBuffer(final int fieldNumber, final ByteBuffer value) - throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeUInt32NoTag(value.capacity()); - writeRawBytes(value); - } - - @Override - public void writeBytesNoTag(final ByteString value) throws IOException { - writeUInt32NoTag(value.size()); - value.writeTo(this); - } - - @Override - public void writeByteArrayNoTag(final byte[] value, int offset, int length) throws IOException { - writeUInt32NoTag(length); - write(value, offset, length); - } - - @Override - public void writeRawBytes(final ByteBuffer value) throws IOException { - if (value.hasArray()) { - write(value.array(), value.arrayOffset(), value.capacity()); - } else { - ByteBuffer duplicated = value.duplicate(); - duplicated.clear(); - write(duplicated); - } - } - - @Override - public void writeMessage(final int fieldNumber, final MessageLite value) - throws IOException { - writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED); - writeMessageNoTag(value); - } - - @Override - public void writeMessageSetExtension(final int fieldNumber, final MessageLite value) - throws IOException { - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_START_GROUP); - writeUInt32(WireFormat.MESSAGE_SET_TYPE_ID, fieldNumber); - writeMessage(WireFormat.MESSAGE_SET_MESSAGE, value); - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_END_GROUP); - } - - @Override - public void writeRawMessageSetExtension(final int fieldNumber, final ByteString value) - throws IOException { - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_START_GROUP); - writeUInt32(WireFormat.MESSAGE_SET_TYPE_ID, fieldNumber); - writeBytes(WireFormat.MESSAGE_SET_MESSAGE, value); - writeTag(WireFormat.MESSAGE_SET_ITEM, WireFormat.WIRETYPE_END_GROUP); - } - - @Override - public void writeMessageNoTag(final MessageLite value) throws IOException { - writeUInt32NoTag(value.getSerializedSize()); - value.writeTo(this); - } - - @Override - public void write(byte value) throws IOException { - if (position == limit) { - doFlush(); - } - - buffer(value); - } - - @Override - public void writeInt32NoTag(int value) throws IOException { - if (value >= 0) { - writeUInt32NoTag(value); - } else { - // Must sign-extend. 
- writeUInt64NoTag(value); - } - } - - @Override - public void writeUInt32NoTag(int value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE); - bufferUInt32NoTag(value); - } - - @Override - public void writeFixed32NoTag(final int value) throws IOException { - flushIfNotAvailable(FIXED_32_SIZE); - bufferFixed32NoTag(value); - } - - @Override - public void writeUInt64NoTag(long value) throws IOException { - flushIfNotAvailable(MAX_VARINT_SIZE); - bufferUInt64NoTag(value); - } - - @Override - public void writeFixed64NoTag(final long value) throws IOException { - flushIfNotAvailable(FIXED_64_SIZE); - bufferFixed64NoTag(value); - } - - @Override - public void writeStringNoTag(String value) throws IOException { - try { - // UTF-8 byte length of the string is at least its UTF-16 code unit length (value.length()), - // and at most 3 times of it. We take advantage of this in both branches below. - final int maxLength = value.length() * Utf8.MAX_BYTES_PER_CHAR; - final int maxLengthVarIntSize = computeUInt32SizeNoTag(maxLength); - - // If we are streaming and the potential length is too big to fit in our buffer, we take the - // slower path. - if (maxLengthVarIntSize + maxLength > limit) { - // Allocate a byte[] that we know can fit the string and encode into it. String.getBytes() - // does the same internally and then does *another copy* to return a byte[] of exactly the - // right size. We can skip that copy and just writeRawBytes up to the actualLength of the - // UTF-8 encoded bytes. - final byte[] encodedBytes = new byte[maxLength]; - int actualLength = Utf8.encode(value, encodedBytes, 0, maxLength); - writeUInt32NoTag(actualLength); - writeLazy(encodedBytes, 0, actualLength); - return; - } - - // Fast path: we have enough space available in our buffer for the string... - if (maxLengthVarIntSize + maxLength > limit - position) { - // Flush to free up space. - doFlush(); - } - - // Optimize for the case where we know this length results in a constant varint length as - // this saves a pass for measuring the length of the string. - final int minLengthVarIntSize = computeUInt32SizeNoTag(value.length()); - int oldPosition = position; - final int length; - try { - if (minLengthVarIntSize == maxLengthVarIntSize) { - position = oldPosition + minLengthVarIntSize; - int newPosition = Utf8.encode(value, buffer, position, limit - position); - // Since this class is stateful and tracks the position, we rewind and store the - // state, prepend the length, then reset it back to the end of the string. - position = oldPosition; - length = newPosition - oldPosition - minLengthVarIntSize; - bufferUInt32NoTag(length); - position = newPosition; - } else { - length = Utf8.encodedLength(value); - bufferUInt32NoTag(length); - position = Utf8.encode(value, buffer, position, length); - } - totalBytesWritten += length; - } catch (UnpairedSurrogateException e) { - // Be extra careful and restore the original position for retrying the write with the - // less efficient path. - totalBytesWritten -= position - oldPosition; - position = oldPosition; - throw e; - } catch (ArrayIndexOutOfBoundsException e) { - throw new OutOfSpaceException(e); - } - } catch (UnpairedSurrogateException e) { - inefficientWriteStringNoTag(value, e); - } - } - - @Override - public void flush() throws IOException { - if (position > 0) { - // Flush the buffer. 
- doFlush(); - } - } - - @Override - public void write(byte[] value, int offset, int length) - throws IOException { - if (limit - position >= length) { - // We have room in the current buffer. - System.arraycopy(value, offset, buffer, position, length); - position += length; - totalBytesWritten += length; - } else { - // Write extends past current buffer. Fill the rest of this buffer and - // flush. - final int bytesWritten = limit - position; - System.arraycopy(value, offset, buffer, position, bytesWritten); - offset += bytesWritten; - length -= bytesWritten; - position = limit; - totalBytesWritten += bytesWritten; - doFlush(); - - // Now deal with the rest. - // Since we have an output stream, this is our buffer - // and buffer offset == 0 - if (length <= limit) { - // Fits in new buffer. - System.arraycopy(value, offset, buffer, 0, length); - position = length; - } else { - // Write is very big. Let's do it all at once. - out.write(value, offset, length); - } - totalBytesWritten += length; - } - } - - @Override - public void writeLazy(byte[] value, int offset, int length) throws IOException { - write(value, offset, length); - } - - @Override - public void write(ByteBuffer value) throws IOException { - int length = value.remaining(); - if (limit - position >= length) { - // We have room in the current buffer. - value.get(buffer, position, length); - position += length; - totalBytesWritten += length; - } else { - // Write extends past current buffer. Fill the rest of this buffer and - // flush. - final int bytesWritten = limit - position; - value.get(buffer, position, bytesWritten); - length -= bytesWritten; - position = limit; - totalBytesWritten += bytesWritten; - doFlush(); - - // Now deal with the rest. - // Since we have an output stream, this is our buffer - // and buffer offset == 0 - while (length > limit) { - // Copy data into the buffer before writing it to OutputStream. - value.get(buffer, 0, limit); - out.write(buffer, 0, limit); - length -= limit; - totalBytesWritten += limit; - } - value.get(buffer, 0, length); - position = length; - totalBytesWritten += length; - } - } - - @Override - public void writeLazy(ByteBuffer value) throws IOException { - write(value); - } - - private void flushIfNotAvailable(int requiredSize) throws IOException { - if (limit - position < requiredSize) { - doFlush(); - } - } - - private void doFlush() throws IOException { - out.write(buffer, 0, position); - position = 0; - } - } -} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DescriptorProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DescriptorProtos.java deleted file mode 100644 index 0468e6c5a99..00000000000 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DescriptorProtos.java +++ /dev/null @@ -1,39141 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: google/protobuf/descriptor.proto - -package org.apache.hadoop.hbase.shaded.com.google.protobuf; - -public final class DescriptorProtos { - private DescriptorProtos() {} - public static void registerAllExtensions( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) { - } - - public static void registerAllExtensions( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) { - registerAllExtensions( - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry); - } - public interface FileDescriptorSetOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.protobuf.FileDescriptorSet) - org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { - - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - java.util.List - getFileList(); - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getFile(int index); - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - int getFileCount(); - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - java.util.List - getFileOrBuilderList(); - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder getFileOrBuilder( - int index); - } - /** - *

-   * The protocol compiler can output a FileDescriptorSet containing the .proto
-   * files it parses.
-   * </pre>
- * - * Protobuf type {@code google.protobuf.FileDescriptorSet} - */ - public static final class FileDescriptorSet extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:google.protobuf.FileDescriptorSet) - FileDescriptorSetOrBuilder { - // Use FileDescriptorSet.newBuilder() to construct. - private FileDescriptorSet(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private FileDescriptorSet() { - file_ = java.util.Collections.emptyList(); - } - - @java.lang.Override - public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private FileDescriptorSet( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - this(); - int mutable_bitField0_ = 0; - org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = - org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - file_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - file_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.PARSER, extensionRegistry)); - break; - } - } - } - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - file_ = java.util.Collections.unmodifiableList(file_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.Builder.class); - } - - public static final int FILE_FIELD_NUMBER = 1; - private java.util.List file_; - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public java.util.List getFileList() { - return file_; - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public java.util.List - getFileOrBuilderList() { - return file_; - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 
1; - */ - public int getFileCount() { - return file_.size(); - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getFile(int index) { - return file_.get(index); - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder getFileOrBuilder( - int index) { - return file_.get(index); - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - for (int i = 0; i < getFileCount(); i++) { - if (!getFile(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - for (int i = 0; i < file_.size(); i++) { - output.writeMessage(1, file_.get(i)); - } - unknownFields.writeTo(output); - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < file_.size(); i++) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeMessageSize(1, file_.get(i)); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet) obj; - - boolean result = true; - result = result && getFileList() - .equals(other.getFileList()); - result = result && unknownFields.equals(other.unknownFields); - return result; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (getFileCount() > 0) { - hash = (37 * hash) + FILE_FIELD_NUMBER; - hash = (53 * hash) + getFileList().hashCode(); - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(byte[] data) - throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom( - byte[] data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseDelimitedFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - *
-     * The protocol compiler can output a FileDescriptorSet containing the .proto
-     * files it parses.
-     * </pre>
- * - * Protobuf type {@code google.protobuf.FileDescriptorSet} - */ - public static final class Builder extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:google.protobuf.FileDescriptorSet) - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSetOrBuilder { - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - getFileFieldBuilder(); - } - } - public Builder clear() { - super.clear(); - if (fileBuilder_ == null) { - file_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - fileBuilder_.clear(); - } - return this; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorSet_descriptor; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet getDefaultInstanceForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet build() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet buildPartial() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet(this); - int from_bitField0_ = bitField0_; - if (fileBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - file_ = java.util.Collections.unmodifiableList(file_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.file_ = file_; - } else { - result.file_ = fileBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder clone() { - return (Builder) super.clone(); - } - public Builder setField( - 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.setField(field, value); - } - public Builder clearField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { - return (Builder) super.clearField(field); - } - public Builder clearOneof( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return (Builder) super.clearOneof(oneof); - } - public Builder setRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { - return (Builder) super.setRepeatedField(field, index, value); - } - public Builder addRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.addRepeatedField(field, value); - } - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet) { - return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet other) { - if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet.getDefaultInstance()) return this; - if (fileBuilder_ == null) { - if (!other.file_.isEmpty()) { - if (file_.isEmpty()) { - file_ = other.file_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureFileIsMutable(); - file_.addAll(other.file_); - } - onChanged(); - } - } else { - if (!other.file_.isEmpty()) { - if (fileBuilder_.isEmpty()) { - fileBuilder_.dispose(); - fileBuilder_ = null; - file_ = other.file_; - bitField0_ = (bitField0_ & ~0x00000001); - fileBuilder_ = - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
- getFileFieldBuilder() : null; - } else { - fileBuilder_.addAllMessages(other.file_); - } - } - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getFileCount(); i++) { - if (!getFile(i).isInitialized()) { - return false; - } - } - return true; - } - - public Builder mergeFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private java.util.List file_ = - java.util.Collections.emptyList(); - private void ensureFileIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - file_ = new java.util.ArrayList(file_); - bitField0_ |= 0x00000001; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder> fileBuilder_; - - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public java.util.List getFileList() { - if (fileBuilder_ == null) { - return java.util.Collections.unmodifiableList(file_); - } else { - return fileBuilder_.getMessageList(); - } - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public int getFileCount() { - if (fileBuilder_ == null) { - return file_.size(); - } else { - return fileBuilder_.getCount(); - } - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getFile(int index) { - if (fileBuilder_ == null) { - return file_.get(index); - } else { - return fileBuilder_.getMessage(index); - } - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public Builder setFile( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto value) { - if (fileBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFileIsMutable(); - file_.set(index, value); - onChanged(); - } else { - fileBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public Builder setFile( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder builderForValue) { - if (fileBuilder_ == null) { - ensureFileIsMutable(); - file_.set(index, builderForValue.build()); - onChanged(); - } else { - fileBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public Builder 
addFile(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto value) { - if (fileBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFileIsMutable(); - file_.add(value); - onChanged(); - } else { - fileBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public Builder addFile( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto value) { - if (fileBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFileIsMutable(); - file_.add(index, value); - onChanged(); - } else { - fileBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public Builder addFile( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder builderForValue) { - if (fileBuilder_ == null) { - ensureFileIsMutable(); - file_.add(builderForValue.build()); - onChanged(); - } else { - fileBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public Builder addFile( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder builderForValue) { - if (fileBuilder_ == null) { - ensureFileIsMutable(); - file_.add(index, builderForValue.build()); - onChanged(); - } else { - fileBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public Builder addAllFile( - java.lang.Iterable values) { - if (fileBuilder_ == null) { - ensureFileIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, file_); - onChanged(); - } else { - fileBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public Builder clearFile() { - if (fileBuilder_ == null) { - file_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - fileBuilder_.clear(); - } - return this; - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public Builder removeFile(int index) { - if (fileBuilder_ == null) { - ensureFileIsMutable(); - file_.remove(index); - onChanged(); - } else { - fileBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder getFileBuilder( - int index) { - return getFileFieldBuilder().getBuilder(index); - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder getFileOrBuilder( - int index) { - if (fileBuilder_ == null) { - return file_.get(index); } else { - return fileBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public java.util.List - getFileOrBuilderList() { - if (fileBuilder_ != null) { - return fileBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(file_); - } - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder addFileBuilder() { - return getFileFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder addFileBuilder( - int index) { - return getFileFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.FileDescriptorProto file = 1; - */ - public java.util.List - getFileBuilderList() { - return getFileFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder> - getFileFieldBuilder() { - if (fileBuilder_ == null) { - fileBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder>( - file_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - file_ = null; - } - return fileBuilder_; - } - public final Builder setUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - public final Builder mergeUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:google.protobuf.FileDescriptorSet) - } - - // @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorSet) - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet(); - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser - PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { - public FileDescriptorSet parsePartialFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return new FileDescriptorSet(input, extensionRegistry); - } - }; - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface FileDescriptorProtoOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.protobuf.FileDescriptorProto) - org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { - - /** - *
-     * file name, relative to root of source tree
-     * 
- * - * optional string name = 1; - */ - boolean hasName(); - /** - *
-     * file name, relative to root of source tree
-     * 
- * - * optional string name = 1; - */ - java.lang.String getName(); - /** - *
-     * file name, relative to root of source tree
-     * 
- * - * optional string name = 1; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getNameBytes(); - - /** - *
-     * e.g. "foo", "foo.bar", etc.
-     * 
- * - * optional string package = 2; - */ - boolean hasPackage(); - /** - *
-     * e.g. "foo", "foo.bar", etc.
-     * 
- * - * optional string package = 2; - */ - java.lang.String getPackage(); - /** - *
-     * e.g. "foo", "foo.bar", etc.
-     * 
- * - * optional string package = 2; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getPackageBytes(); - - /** - *
-     * Names of files imported by this file.
-     * 
- * - * repeated string dependency = 3; - */ - java.util.List - getDependencyList(); - /** - *
-     * Names of files imported by this file.
-     * 
- * - * repeated string dependency = 3; - */ - int getDependencyCount(); - /** - *
-     * Names of files imported by this file.
-     * 
- * - * repeated string dependency = 3; - */ - java.lang.String getDependency(int index); - /** - *
-     * Names of files imported by this file.
-     * 
- * - * repeated string dependency = 3; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getDependencyBytes(int index); - - /** - *
-     * Indexes of the public imported files in the dependency list above.
-     * 
- * - * repeated int32 public_dependency = 10; - */ - java.util.List getPublicDependencyList(); - /** - *
-     * Indexes of the public imported files in the dependency list above.
-     * 
- * - * repeated int32 public_dependency = 10; - */ - int getPublicDependencyCount(); - /** - *
-     * Indexes of the public imported files in the dependency list above.
-     * 
- * - * repeated int32 public_dependency = 10; - */ - int getPublicDependency(int index); - - /** - *
-     * Indexes of the weak imported files in the dependency list.
-     * For Google-internal migration only. Do not use.
-     * 
- * - * repeated int32 weak_dependency = 11; - */ - java.util.List getWeakDependencyList(); - /** - *
-     * Indexes of the weak imported files in the dependency list.
-     * For Google-internal migration only. Do not use.
-     * 
- * - * repeated int32 weak_dependency = 11; - */ - int getWeakDependencyCount(); - /** - *
-     * Indexes of the weak imported files in the dependency list.
-     * For Google-internal migration only. Do not use.
-     * 
- * - * repeated int32 weak_dependency = 11; - */ - int getWeakDependency(int index); - - /** - *
-     * All top-level definitions in this file.
-     * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - java.util.List - getMessageTypeList(); - /** - *
-     * All top-level definitions in this file.
-     * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getMessageType(int index); - /** - *
-     * All top-level definitions in this file.
-     * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - int getMessageTypeCount(); - /** - *
-     * All top-level definitions in this file.
-     * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - java.util.List - getMessageTypeOrBuilderList(); - /** - *
-     * All top-level definitions in this file.
-     * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder getMessageTypeOrBuilder( - int index); - - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - java.util.List - getEnumTypeList(); - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getEnumType(int index); - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - int getEnumTypeCount(); - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - java.util.List - getEnumTypeOrBuilderList(); - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder getEnumTypeOrBuilder( - int index); - - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - java.util.List - getServiceList(); - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto getService(int index); - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - int getServiceCount(); - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - java.util.List - getServiceOrBuilderList(); - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder getServiceOrBuilder( - int index); - - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - java.util.List - getExtensionList(); - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getExtension(int index); - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - int getExtensionCount(); - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - java.util.List - getExtensionOrBuilderList(); - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getExtensionOrBuilder( - int index); - - /** - * optional .google.protobuf.FileOptions options = 8; - */ - boolean hasOptions(); - /** - * optional .google.protobuf.FileOptions options = 8; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions getOptions(); - /** - * optional .google.protobuf.FileOptions options = 8; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder getOptionsOrBuilder(); - - /** - *
-     * This field contains optional information about the original source code.
-     * You may safely remove this entire field without harming runtime
-     * functionality of the descriptors -- the information is needed only by
-     * development tools.
-     * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - boolean hasSourceCodeInfo(); - /** - *
-     * This field contains optional information about the original source code.
-     * You may safely remove this entire field without harming runtime
-     * functionality of the descriptors -- the information is needed only by
-     * development tools.
-     * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo getSourceCodeInfo(); - /** - *
-     * This field contains optional information about the original source code.
-     * You may safely remove this entire field without harming runtime
-     * functionality of the descriptors -- the information is needed only by
-     * development tools.
-     * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder getSourceCodeInfoOrBuilder(); - - /** - *
-     * The syntax of the proto file.
-     * The supported values are "proto2" and "proto3".
-     * 
- * - * optional string syntax = 12; - */ - boolean hasSyntax(); - /** - *
-     * The syntax of the proto file.
-     * The supported values are "proto2" and "proto3".
-     * 
- * - * optional string syntax = 12; - */ - java.lang.String getSyntax(); - /** - *
-     * The syntax of the proto file.
-     * The supported values are "proto2" and "proto3".
-     * 
- * - * optional string syntax = 12; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getSyntaxBytes(); - } - /** - *
-   * Describes a complete .proto file.
-   * 
- * - * Protobuf type {@code google.protobuf.FileDescriptorProto} - */ - public static final class FileDescriptorProto extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:google.protobuf.FileDescriptorProto) - FileDescriptorProtoOrBuilder { - // Use FileDescriptorProto.newBuilder() to construct. - private FileDescriptorProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private FileDescriptorProto() { - name_ = ""; - package_ = ""; - dependency_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; - publicDependency_ = java.util.Collections.emptyList(); - weakDependency_ = java.util.Collections.emptyList(); - messageType_ = java.util.Collections.emptyList(); - enumType_ = java.util.Collections.emptyList(); - service_ = java.util.Collections.emptyList(); - extension_ = java.util.Collections.emptyList(); - syntax_ = ""; - } - - @java.lang.Override - public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private FileDescriptorProto( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - this(); - int mutable_bitField0_ = 0; - org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = - org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); - bitField0_ |= 0x00000001; - name_ = bs; - break; - } - case 18: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); - bitField0_ |= 0x00000002; - package_ = bs; - break; - } - case 26: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); - if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - dependency_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000004; - } - dependency_.add(bs); - break; - } - case 34: { - if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { - messageType_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000020; - } - messageType_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.PARSER, extensionRegistry)); - break; - } - case 42: { - if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { - enumType_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000040; - } - enumType_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.PARSER, extensionRegistry)); - break; - } - case 50: { - if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) { - service_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000080; - } - service_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.PARSER, extensionRegistry)); - break; - } - 
case 58: { - if (!((mutable_bitField0_ & 0x00000100) == 0x00000100)) { - extension_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000100; - } - extension_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.PARSER, extensionRegistry)); - break; - } - case 66: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder subBuilder = null; - if (((bitField0_ & 0x00000004) == 0x00000004)) { - subBuilder = options_.toBuilder(); - } - options_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(options_); - options_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000004; - break; - } - case 74: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder subBuilder = null; - if (((bitField0_ & 0x00000008) == 0x00000008)) { - subBuilder = sourceCodeInfo_.toBuilder(); - } - sourceCodeInfo_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(sourceCodeInfo_); - sourceCodeInfo_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000008; - break; - } - case 80: { - if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { - publicDependency_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000008; - } - publicDependency_.add(input.readInt32()); - break; - } - case 82: { - int length = input.readRawVarint32(); - int limit = input.pushLimit(length); - if (!((mutable_bitField0_ & 0x00000008) == 0x00000008) && input.getBytesUntilLimit() > 0) { - publicDependency_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000008; - } - while (input.getBytesUntilLimit() > 0) { - publicDependency_.add(input.readInt32()); - } - input.popLimit(limit); - break; - } - case 88: { - if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { - weakDependency_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000010; - } - weakDependency_.add(input.readInt32()); - break; - } - case 90: { - int length = input.readRawVarint32(); - int limit = input.pushLimit(length); - if (!((mutable_bitField0_ & 0x00000010) == 0x00000010) && input.getBytesUntilLimit() > 0) { - weakDependency_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000010; - } - while (input.getBytesUntilLimit() > 0) { - weakDependency_.add(input.readInt32()); - } - input.popLimit(limit); - break; - } - case 98: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); - bitField0_ |= 0x00000010; - syntax_ = bs; - break; - } - } - } - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - dependency_ = dependency_.getUnmodifiableView(); - } - if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { - messageType_ = java.util.Collections.unmodifiableList(messageType_); - } - if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { - enumType_ = java.util.Collections.unmodifiableList(enumType_); - } - if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) { - service_ = 
java.util.Collections.unmodifiableList(service_); - } - if (((mutable_bitField0_ & 0x00000100) == 0x00000100)) { - extension_ = java.util.Collections.unmodifiableList(extension_); - } - if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { - publicDependency_ = java.util.Collections.unmodifiableList(publicDependency_); - } - if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { - weakDependency_ = java.util.Collections.unmodifiableList(weakDependency_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorProto_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorProto_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder.class); - } - - private int bitField0_; - public static final int NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object name_; - /** - *
-     * file name, relative to root of source tree
-     * 
- * - * optional string name = 1; - */ - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - *
-     * file name, relative to root of source tree
-     * 
- * - * optional string name = 1; - */ - public java.lang.String getName() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - name_ = s; - } - return s; - } - } - /** - *
-     * file name, relative to root of source tree
-     * 
- * - * optional string name = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - name_ = b; - return b; - } else { - return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - } - } - - public static final int PACKAGE_FIELD_NUMBER = 2; - private volatile java.lang.Object package_; - /** - *
-     * e.g. "foo", "foo.bar", etc.
-     * 
- * - * optional string package = 2; - */ - public boolean hasPackage() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - *
-     * e.g. "foo", "foo.bar", etc.
-     * 
- * - * optional string package = 2; - */ - public java.lang.String getPackage() { - java.lang.Object ref = package_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - package_ = s; - } - return s; - } - } - /** - *
-     * e.g. "foo", "foo.bar", etc.
-     * 
- * - * optional string package = 2; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getPackageBytes() { - java.lang.Object ref = package_; - if (ref instanceof java.lang.String) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - package_ = b; - return b; - } else { - return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - } - } - - public static final int DEPENDENCY_FIELD_NUMBER = 3; - private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList dependency_; - /** - *
-     * Names of files imported by this file.
-     * 
- * - * repeated string dependency = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList - getDependencyList() { - return dependency_; - } - /** - *
-     * Names of files imported by this file.
-     * 
- * - * repeated string dependency = 3; - */ - public int getDependencyCount() { - return dependency_.size(); - } - /** - *
-     * Names of files imported by this file.
-     * 
- * - * repeated string dependency = 3; - */ - public java.lang.String getDependency(int index) { - return dependency_.get(index); - } - /** - *
-     * Names of files imported by this file.
-     * 
- * - * repeated string dependency = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getDependencyBytes(int index) { - return dependency_.getByteString(index); - } - - public static final int PUBLIC_DEPENDENCY_FIELD_NUMBER = 10; - private java.util.List publicDependency_; - /** - *
-     * Indexes of the public imported files in the dependency list above.
-     * 
- * - * repeated int32 public_dependency = 10; - */ - public java.util.List - getPublicDependencyList() { - return publicDependency_; - } - /** - *
-     * Indexes of the public imported files in the dependency list above.
-     * 
- * - * repeated int32 public_dependency = 10; - */ - public int getPublicDependencyCount() { - return publicDependency_.size(); - } - /** - *
-     * Indexes of the public imported files in the dependency list above.
-     * 
- * - * repeated int32 public_dependency = 10; - */ - public int getPublicDependency(int index) { - return publicDependency_.get(index); - } - - public static final int WEAK_DEPENDENCY_FIELD_NUMBER = 11; - private java.util.List weakDependency_; - /** - *
-     * Indexes of the weak imported files in the dependency list.
-     * For Google-internal migration only. Do not use.
-     * 
- * - * repeated int32 weak_dependency = 11; - */ - public java.util.List - getWeakDependencyList() { - return weakDependency_; - } - /** - *
-     * Indexes of the weak imported files in the dependency list.
-     * For Google-internal migration only. Do not use.
-     * 
- * - * repeated int32 weak_dependency = 11; - */ - public int getWeakDependencyCount() { - return weakDependency_.size(); - } - /** - *
-     * Indexes of the weak imported files in the dependency list.
-     * For Google-internal migration only. Do not use.
-     * 
- * - * repeated int32 weak_dependency = 11; - */ - public int getWeakDependency(int index) { - return weakDependency_.get(index); - } - - public static final int MESSAGE_TYPE_FIELD_NUMBER = 4; - private java.util.List messageType_; - /** - *
-     * All top-level definitions in this file.
-     * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public java.util.List getMessageTypeList() { - return messageType_; - } - /** - *
-     * All top-level definitions in this file.
-     * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public java.util.List - getMessageTypeOrBuilderList() { - return messageType_; - } - /** - *
-     * All top-level definitions in this file.
-     * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public int getMessageTypeCount() { - return messageType_.size(); - } - /** - *
-     * All top-level definitions in this file.
-     * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getMessageType(int index) { - return messageType_.get(index); - } - /** - *
-     * All top-level definitions in this file.
-     * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder getMessageTypeOrBuilder( - int index) { - return messageType_.get(index); - } - - public static final int ENUM_TYPE_FIELD_NUMBER = 5; - private java.util.List enumType_; - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public java.util.List getEnumTypeList() { - return enumType_; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public java.util.List - getEnumTypeOrBuilderList() { - return enumType_; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public int getEnumTypeCount() { - return enumType_.size(); - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getEnumType(int index) { - return enumType_.get(index); - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder getEnumTypeOrBuilder( - int index) { - return enumType_.get(index); - } - - public static final int SERVICE_FIELD_NUMBER = 6; - private java.util.List service_; - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public java.util.List getServiceList() { - return service_; - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public java.util.List - getServiceOrBuilderList() { - return service_; - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public int getServiceCount() { - return service_.size(); - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto getService(int index) { - return service_.get(index); - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder getServiceOrBuilder( - int index) { - return service_.get(index); - } - - public static final int EXTENSION_FIELD_NUMBER = 7; - private java.util.List extension_; - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public java.util.List getExtensionList() { - return extension_; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public java.util.List - getExtensionOrBuilderList() { - return extension_; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public int getExtensionCount() { - return extension_.size(); - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getExtension(int index) { - return extension_.get(index); - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getExtensionOrBuilder( - int index) { - return extension_.get(index); - } - - public static final int OPTIONS_FIELD_NUMBER = 8; - private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions options_; - /** - * optional .google.protobuf.FileOptions options = 8; - */ - public 
boolean hasOptions() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional .google.protobuf.FileOptions options = 8; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions getOptions() { - return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance() : options_; - } - /** - * optional .google.protobuf.FileOptions options = 8; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder getOptionsOrBuilder() { - return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance() : options_; - } - - public static final int SOURCE_CODE_INFO_FIELD_NUMBER = 9; - private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo sourceCodeInfo_; - /** - *
-     * This field contains optional information about the original source code.
-     * You may safely remove this entire field without harming runtime
-     * functionality of the descriptors -- the information is needed only by
-     * development tools.
-     * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - public boolean hasSourceCodeInfo() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - *
-     * This field contains optional information about the original source code.
-     * You may safely remove this entire field without harming runtime
-     * functionality of the descriptors -- the information is needed only by
-     * development tools.
-     * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo getSourceCodeInfo() { - return sourceCodeInfo_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.getDefaultInstance() : sourceCodeInfo_; - } - /** - *
-     * This field contains optional information about the original source code.
-     * You may safely remove this entire field without harming runtime
-     * functionality of the descriptors -- the information is needed only by
-     * development tools.
-     * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder getSourceCodeInfoOrBuilder() { - return sourceCodeInfo_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.getDefaultInstance() : sourceCodeInfo_; - } - - public static final int SYNTAX_FIELD_NUMBER = 12; - private volatile java.lang.Object syntax_; - /** - *
-     * The syntax of the proto file.
-     * The supported values are "proto2" and "proto3".
-     * 
- * - * optional string syntax = 12; - */ - public boolean hasSyntax() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - /** - *
-     * The syntax of the proto file.
-     * The supported values are "proto2" and "proto3".
-     * 
- * - * optional string syntax = 12; - */ - public java.lang.String getSyntax() { - java.lang.Object ref = syntax_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - syntax_ = s; - } - return s; - } - } - /** - *
-     * The syntax of the proto file.
-     * The supported values are "proto2" and "proto3".
-     * 
- * - * optional string syntax = 12; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getSyntaxBytes() { - java.lang.Object ref = syntax_; - if (ref instanceof java.lang.String) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - syntax_ = b; - return b; - } else { - return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - for (int i = 0; i < getMessageTypeCount(); i++) { - if (!getMessageType(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getEnumTypeCount(); i++) { - if (!getEnumType(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getServiceCount(); i++) { - if (!getService(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - for (int i = 0; i < getExtensionCount(); i++) { - if (!getExtension(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if (hasOptions()) { - if (!getOptions().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 2, package_); - } - for (int i = 0; i < dependency_.size(); i++) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 3, dependency_.getRaw(i)); - } - for (int i = 0; i < messageType_.size(); i++) { - output.writeMessage(4, messageType_.get(i)); - } - for (int i = 0; i < enumType_.size(); i++) { - output.writeMessage(5, enumType_.get(i)); - } - for (int i = 0; i < service_.size(); i++) { - output.writeMessage(6, service_.get(i)); - } - for (int i = 0; i < extension_.size(); i++) { - output.writeMessage(7, extension_.get(i)); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(8, getOptions()); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeMessage(9, getSourceCodeInfo()); - } - for (int i = 0; i < publicDependency_.size(); i++) { - output.writeInt32(10, publicDependency_.get(i)); - } - for (int i = 0; i < weakDependency_.size(); i++) { - output.writeInt32(11, weakDependency_.get(i)); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 12, syntax_); - } - unknownFields.writeTo(output); - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(2, package_); - } - { - int dataSize = 0; - for (int 
i = 0; i < dependency_.size(); i++) { - dataSize += computeStringSizeNoTag(dependency_.getRaw(i)); - } - size += dataSize; - size += 1 * getDependencyList().size(); - } - for (int i = 0; i < messageType_.size(); i++) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeMessageSize(4, messageType_.get(i)); - } - for (int i = 0; i < enumType_.size(); i++) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeMessageSize(5, enumType_.get(i)); - } - for (int i = 0; i < service_.size(); i++) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeMessageSize(6, service_.get(i)); - } - for (int i = 0; i < extension_.size(); i++) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeMessageSize(7, extension_.get(i)); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeMessageSize(8, getOptions()); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeMessageSize(9, getSourceCodeInfo()); - } - { - int dataSize = 0; - for (int i = 0; i < publicDependency_.size(); i++) { - dataSize += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeInt32SizeNoTag(publicDependency_.get(i)); - } - size += dataSize; - size += 1 * getPublicDependencyList().size(); - } - { - int dataSize = 0; - for (int i = 0; i < weakDependency_.size(); i++) { - dataSize += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeInt32SizeNoTag(weakDependency_.get(i)); - } - size += dataSize; - size += 1 * getWeakDependencyList().size(); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(12, syntax_); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto) obj; - - boolean result = true; - result = result && (hasName() == other.hasName()); - if (hasName()) { - result = result && getName() - .equals(other.getName()); - } - result = result && (hasPackage() == other.hasPackage()); - if (hasPackage()) { - result = result && getPackage() - .equals(other.getPackage()); - } - result = result && getDependencyList() - .equals(other.getDependencyList()); - result = result && getPublicDependencyList() - .equals(other.getPublicDependencyList()); - result = result && getWeakDependencyList() - .equals(other.getWeakDependencyList()); - result = result && getMessageTypeList() - .equals(other.getMessageTypeList()); - result = result && getEnumTypeList() - .equals(other.getEnumTypeList()); - result = result && getServiceList() - .equals(other.getServiceList()); - result = result && getExtensionList() - .equals(other.getExtensionList()); - result = result && (hasOptions() == other.hasOptions()); - if (hasOptions()) { - result = result && getOptions() - 
.equals(other.getOptions()); - } - result = result && (hasSourceCodeInfo() == other.hasSourceCodeInfo()); - if (hasSourceCodeInfo()) { - result = result && getSourceCodeInfo() - .equals(other.getSourceCodeInfo()); - } - result = result && (hasSyntax() == other.hasSyntax()); - if (hasSyntax()) { - result = result && getSyntax() - .equals(other.getSyntax()); - } - result = result && unknownFields.equals(other.unknownFields); - return result; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasName()) { - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - } - if (hasPackage()) { - hash = (37 * hash) + PACKAGE_FIELD_NUMBER; - hash = (53 * hash) + getPackage().hashCode(); - } - if (getDependencyCount() > 0) { - hash = (37 * hash) + DEPENDENCY_FIELD_NUMBER; - hash = (53 * hash) + getDependencyList().hashCode(); - } - if (getPublicDependencyCount() > 0) { - hash = (37 * hash) + PUBLIC_DEPENDENCY_FIELD_NUMBER; - hash = (53 * hash) + getPublicDependencyList().hashCode(); - } - if (getWeakDependencyCount() > 0) { - hash = (37 * hash) + WEAK_DEPENDENCY_FIELD_NUMBER; - hash = (53 * hash) + getWeakDependencyList().hashCode(); - } - if (getMessageTypeCount() > 0) { - hash = (37 * hash) + MESSAGE_TYPE_FIELD_NUMBER; - hash = (53 * hash) + getMessageTypeList().hashCode(); - } - if (getEnumTypeCount() > 0) { - hash = (37 * hash) + ENUM_TYPE_FIELD_NUMBER; - hash = (53 * hash) + getEnumTypeList().hashCode(); - } - if (getServiceCount() > 0) { - hash = (37 * hash) + SERVICE_FIELD_NUMBER; - hash = (53 * hash) + getServiceList().hashCode(); - } - if (getExtensionCount() > 0) { - hash = (37 * hash) + EXTENSION_FIELD_NUMBER; - hash = (53 * hash) + getExtensionList().hashCode(); - } - if (hasOptions()) { - hash = (37 * hash) + OPTIONS_FIELD_NUMBER; - hash = (53 * hash) + getOptions().hashCode(); - } - if (hasSourceCodeInfo()) { - hash = (37 * hash) + SOURCE_CODE_INFO_FIELD_NUMBER; - hash = (53 * hash) + getSourceCodeInfo().hashCode(); - } - if (hasSyntax()) { - hash = (37 * hash) + SYNTAX_FIELD_NUMBER; - hash = (53 * hash) + getSyntax().hashCode(); - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom(byte[] data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( - byte[] data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseDelimitedFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - *
-     * Describes a complete .proto file.
-     * 
- * - * Protobuf type {@code google.protobuf.FileDescriptorProto} - */ - public static final class Builder extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:google.protobuf.FileDescriptorProto) - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProtoOrBuilder { - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorProto_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorProto_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - getMessageTypeFieldBuilder(); - getEnumTypeFieldBuilder(); - getServiceFieldBuilder(); - getExtensionFieldBuilder(); - getOptionsFieldBuilder(); - getSourceCodeInfoFieldBuilder(); - } - } - public Builder clear() { - super.clear(); - name_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - package_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - dependency_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - publicDependency_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - weakDependency_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000010); - if (messageTypeBuilder_ == null) { - messageType_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000020); - } else { - messageTypeBuilder_.clear(); - } - if (enumTypeBuilder_ == null) { - enumType_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000040); - } else { - enumTypeBuilder_.clear(); - } - if (serviceBuilder_ == null) { - service_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000080); - } else { - serviceBuilder_.clear(); - } - if (extensionBuilder_ == null) { - extension_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000100); - } else { - extensionBuilder_.clear(); - } - if (optionsBuilder_ == null) { - options_ = null; - } else { - optionsBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000200); - if (sourceCodeInfoBuilder_ == null) { - sourceCodeInfo_ = null; - } else { - sourceCodeInfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000400); - syntax_ = ""; - bitField0_ = (bitField0_ & ~0x00000800); - return this; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() 
{ - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FileDescriptorProto_descriptor; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getDefaultInstanceForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto build() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto buildPartial() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.name_ = name_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.package_ = package_; - if (((bitField0_ & 0x00000004) == 0x00000004)) { - dependency_ = dependency_.getUnmodifiableView(); - bitField0_ = (bitField0_ & ~0x00000004); - } - result.dependency_ = dependency_; - if (((bitField0_ & 0x00000008) == 0x00000008)) { - publicDependency_ = java.util.Collections.unmodifiableList(publicDependency_); - bitField0_ = (bitField0_ & ~0x00000008); - } - result.publicDependency_ = publicDependency_; - if (((bitField0_ & 0x00000010) == 0x00000010)) { - weakDependency_ = java.util.Collections.unmodifiableList(weakDependency_); - bitField0_ = (bitField0_ & ~0x00000010); - } - result.weakDependency_ = weakDependency_; - if (messageTypeBuilder_ == null) { - if (((bitField0_ & 0x00000020) == 0x00000020)) { - messageType_ = java.util.Collections.unmodifiableList(messageType_); - bitField0_ = (bitField0_ & ~0x00000020); - } - result.messageType_ = messageType_; - } else { - result.messageType_ = messageTypeBuilder_.build(); - } - if (enumTypeBuilder_ == null) { - if (((bitField0_ & 0x00000040) == 0x00000040)) { - enumType_ = java.util.Collections.unmodifiableList(enumType_); - bitField0_ = (bitField0_ & ~0x00000040); - } - result.enumType_ = enumType_; - } else { - result.enumType_ = enumTypeBuilder_.build(); - } - if (serviceBuilder_ == null) { - if (((bitField0_ & 0x00000080) == 0x00000080)) { - service_ = java.util.Collections.unmodifiableList(service_); - bitField0_ = (bitField0_ & ~0x00000080); - } - result.service_ = service_; - } else { - result.service_ = serviceBuilder_.build(); - } - if (extensionBuilder_ == null) { - if (((bitField0_ & 0x00000100) == 0x00000100)) { - extension_ = java.util.Collections.unmodifiableList(extension_); - bitField0_ = (bitField0_ & ~0x00000100); - } - result.extension_ = extension_; - } else { - result.extension_ = extensionBuilder_.build(); - } - if (((from_bitField0_ & 0x00000200) == 0x00000200)) { - to_bitField0_ |= 0x00000004; - } - if (optionsBuilder_ == null) { - result.options_ = options_; - } else { - result.options_ = optionsBuilder_.build(); - } - if (((from_bitField0_ & 0x00000400) == 0x00000400)) { - to_bitField0_ |= 0x00000008; - } - if (sourceCodeInfoBuilder_ == null) { - result.sourceCodeInfo_ = sourceCodeInfo_; - } 
else { - result.sourceCodeInfo_ = sourceCodeInfoBuilder_.build(); - } - if (((from_bitField0_ & 0x00000800) == 0x00000800)) { - to_bitField0_ |= 0x00000010; - } - result.syntax_ = syntax_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder clone() { - return (Builder) super.clone(); - } - public Builder setField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.setField(field, value); - } - public Builder clearField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { - return (Builder) super.clearField(field); - } - public Builder clearOneof( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return (Builder) super.clearOneof(oneof); - } - public Builder setRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { - return (Builder) super.setRepeatedField(field, index, value); - } - public Builder addRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.addRepeatedField(field, value); - } - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto) { - return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto other) { - if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto.getDefaultInstance()) return this; - if (other.hasName()) { - bitField0_ |= 0x00000001; - name_ = other.name_; - onChanged(); - } - if (other.hasPackage()) { - bitField0_ |= 0x00000002; - package_ = other.package_; - onChanged(); - } - if (!other.dependency_.isEmpty()) { - if (dependency_.isEmpty()) { - dependency_ = other.dependency_; - bitField0_ = (bitField0_ & ~0x00000004); - } else { - ensureDependencyIsMutable(); - dependency_.addAll(other.dependency_); - } - onChanged(); - } - if (!other.publicDependency_.isEmpty()) { - if (publicDependency_.isEmpty()) { - publicDependency_ = other.publicDependency_; - bitField0_ = (bitField0_ & ~0x00000008); - } else { - ensurePublicDependencyIsMutable(); - publicDependency_.addAll(other.publicDependency_); - } - onChanged(); - } - if (!other.weakDependency_.isEmpty()) { - if (weakDependency_.isEmpty()) { - weakDependency_ = other.weakDependency_; - bitField0_ = (bitField0_ & ~0x00000010); - } else { - ensureWeakDependencyIsMutable(); - weakDependency_.addAll(other.weakDependency_); - } - onChanged(); - } - if (messageTypeBuilder_ == null) { - if (!other.messageType_.isEmpty()) { - if (messageType_.isEmpty()) { - messageType_ = other.messageType_; - bitField0_ = (bitField0_ & ~0x00000020); - } else { - ensureMessageTypeIsMutable(); - messageType_.addAll(other.messageType_); - } - onChanged(); - } - } else { - if (!other.messageType_.isEmpty()) { - if (messageTypeBuilder_.isEmpty()) { - messageTypeBuilder_.dispose(); - messageTypeBuilder_ = null; - messageType_ = other.messageType_; - bitField0_ = (bitField0_ & ~0x00000020); - messageTypeBuilder_ = - 
org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? - getMessageTypeFieldBuilder() : null; - } else { - messageTypeBuilder_.addAllMessages(other.messageType_); - } - } - } - if (enumTypeBuilder_ == null) { - if (!other.enumType_.isEmpty()) { - if (enumType_.isEmpty()) { - enumType_ = other.enumType_; - bitField0_ = (bitField0_ & ~0x00000040); - } else { - ensureEnumTypeIsMutable(); - enumType_.addAll(other.enumType_); - } - onChanged(); - } - } else { - if (!other.enumType_.isEmpty()) { - if (enumTypeBuilder_.isEmpty()) { - enumTypeBuilder_.dispose(); - enumTypeBuilder_ = null; - enumType_ = other.enumType_; - bitField0_ = (bitField0_ & ~0x00000040); - enumTypeBuilder_ = - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? - getEnumTypeFieldBuilder() : null; - } else { - enumTypeBuilder_.addAllMessages(other.enumType_); - } - } - } - if (serviceBuilder_ == null) { - if (!other.service_.isEmpty()) { - if (service_.isEmpty()) { - service_ = other.service_; - bitField0_ = (bitField0_ & ~0x00000080); - } else { - ensureServiceIsMutable(); - service_.addAll(other.service_); - } - onChanged(); - } - } else { - if (!other.service_.isEmpty()) { - if (serviceBuilder_.isEmpty()) { - serviceBuilder_.dispose(); - serviceBuilder_ = null; - service_ = other.service_; - bitField0_ = (bitField0_ & ~0x00000080); - serviceBuilder_ = - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? - getServiceFieldBuilder() : null; - } else { - serviceBuilder_.addAllMessages(other.service_); - } - } - } - if (extensionBuilder_ == null) { - if (!other.extension_.isEmpty()) { - if (extension_.isEmpty()) { - extension_ = other.extension_; - bitField0_ = (bitField0_ & ~0x00000100); - } else { - ensureExtensionIsMutable(); - extension_.addAll(other.extension_); - } - onChanged(); - } - } else { - if (!other.extension_.isEmpty()) { - if (extensionBuilder_.isEmpty()) { - extensionBuilder_.dispose(); - extensionBuilder_ = null; - extension_ = other.extension_; - bitField0_ = (bitField0_ & ~0x00000100); - extensionBuilder_ = - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
- getExtensionFieldBuilder() : null; - } else { - extensionBuilder_.addAllMessages(other.extension_); - } - } - } - if (other.hasOptions()) { - mergeOptions(other.getOptions()); - } - if (other.hasSourceCodeInfo()) { - mergeSourceCodeInfo(other.getSourceCodeInfo()); - } - if (other.hasSyntax()) { - bitField0_ |= 0x00000800; - syntax_ = other.syntax_; - onChanged(); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getMessageTypeCount(); i++) { - if (!getMessageType(i).isInitialized()) { - return false; - } - } - for (int i = 0; i < getEnumTypeCount(); i++) { - if (!getEnumType(i).isInitialized()) { - return false; - } - } - for (int i = 0; i < getServiceCount(); i++) { - if (!getService(i).isInitialized()) { - return false; - } - } - for (int i = 0; i < getExtensionCount(); i++) { - if (!getExtension(i).isInitialized()) { - return false; - } - } - if (hasOptions()) { - if (!getOptions().isInitialized()) { - return false; - } - } - return true; - } - - public Builder mergeFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private java.lang.Object name_ = ""; - /** - *
-       * file name, relative to root of source tree
-       * 
- * - * optional string name = 1; - */ - public boolean hasName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - *
-       * file name, relative to root of source tree
-       * 
- * - * optional string name = 1; - */ - public java.lang.String getName() { - java.lang.Object ref = name_; - if (!(ref instanceof java.lang.String)) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - name_ = s; - } - return s; - } else { - return (java.lang.String) ref; - } - } - /** - *
-       * file name, relative to root of source tree
-       * 
- * - * optional string name = 1; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - name_ = b; - return b; - } else { - return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - } - } - /** - *
-       * file name, relative to root of source tree
-       * 
- * - * optional string name = 1; - */ - public Builder setName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - /** - *
-       * file name, relative to root of source tree
-       * 
- * - * optional string name = 1; - */ - public Builder clearName() { - bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - /** - *
-       * file name, relative to root of source tree
-       * 
- * - * optional string name = 1; - */ - public Builder setNameBytes( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - - private java.lang.Object package_ = ""; - /** - *
-       * e.g. "foo", "foo.bar", etc.
-       * 
- * - * optional string package = 2; - */ - public boolean hasPackage() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - *
-       * e.g. "foo", "foo.bar", etc.
-       * 
- * - * optional string package = 2; - */ - public java.lang.String getPackage() { - java.lang.Object ref = package_; - if (!(ref instanceof java.lang.String)) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - package_ = s; - } - return s; - } else { - return (java.lang.String) ref; - } - } - /** - *
-       * e.g. "foo", "foo.bar", etc.
-       * 
- * - * optional string package = 2; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getPackageBytes() { - java.lang.Object ref = package_; - if (ref instanceof String) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - package_ = b; - return b; - } else { - return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - } - } - /** - *
-       * e.g. "foo", "foo.bar", etc.
-       * 
- * - * optional string package = 2; - */ - public Builder setPackage( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - package_ = value; - onChanged(); - return this; - } - /** - *
-       * e.g. "foo", "foo.bar", etc.
-       * 
- * - * optional string package = 2; - */ - public Builder clearPackage() { - bitField0_ = (bitField0_ & ~0x00000002); - package_ = getDefaultInstance().getPackage(); - onChanged(); - return this; - } - /** - *
-       * e.g. "foo", "foo.bar", etc.
-       * 
- * - * optional string package = 2; - */ - public Builder setPackageBytes( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - package_ = value; - onChanged(); - return this; - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList dependency_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureDependencyIsMutable() { - if (!((bitField0_ & 0x00000004) == 0x00000004)) { - dependency_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(dependency_); - bitField0_ |= 0x00000004; - } - } - /** - *
-       * Names of files imported by this file.
-       * 
- * - * repeated string dependency = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList - getDependencyList() { - return dependency_.getUnmodifiableView(); - } - /** - *
-       * Names of files imported by this file.
-       * 
- * - * repeated string dependency = 3; - */ - public int getDependencyCount() { - return dependency_.size(); - } - /** - *
-       * Names of files imported by this file.
-       * 
- * - * repeated string dependency = 3; - */ - public java.lang.String getDependency(int index) { - return dependency_.get(index); - } - /** - *
-       * Names of files imported by this file.
-       * 
- * - * repeated string dependency = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getDependencyBytes(int index) { - return dependency_.getByteString(index); - } - /** - *
-       * Names of files imported by this file.
-       * 
- * - * repeated string dependency = 3; - */ - public Builder setDependency( - int index, java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureDependencyIsMutable(); - dependency_.set(index, value); - onChanged(); - return this; - } - /** - *
-       * Names of files imported by this file.
-       * 
- * - * repeated string dependency = 3; - */ - public Builder addDependency( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureDependencyIsMutable(); - dependency_.add(value); - onChanged(); - return this; - } - /** - *
-       * Names of files imported by this file.
-       * 
- * - * repeated string dependency = 3; - */ - public Builder addAllDependency( - java.lang.Iterable values) { - ensureDependencyIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, dependency_); - onChanged(); - return this; - } - /** - *
-       * Names of files imported by this file.
-       * 
- * - * repeated string dependency = 3; - */ - public Builder clearDependency() { - dependency_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - onChanged(); - return this; - } - /** - *
-       * Names of files imported by this file.
-       * 
- * - * repeated string dependency = 3; - */ - public Builder addDependencyBytes( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureDependencyIsMutable(); - dependency_.add(value); - onChanged(); - return this; - } - - private java.util.List publicDependency_ = java.util.Collections.emptyList(); - private void ensurePublicDependencyIsMutable() { - if (!((bitField0_ & 0x00000008) == 0x00000008)) { - publicDependency_ = new java.util.ArrayList(publicDependency_); - bitField0_ |= 0x00000008; - } - } - /** - *
-       * Indexes of the public imported files in the dependency list above.
-       * 
- * - * repeated int32 public_dependency = 10; - */ - public java.util.List - getPublicDependencyList() { - return java.util.Collections.unmodifiableList(publicDependency_); - } - /** - *
-       * Indexes of the public imported files in the dependency list above.
-       * 
- * - * repeated int32 public_dependency = 10; - */ - public int getPublicDependencyCount() { - return publicDependency_.size(); - } - /** - *
-       * Indexes of the public imported files in the dependency list above.
-       * 
- * - * repeated int32 public_dependency = 10; - */ - public int getPublicDependency(int index) { - return publicDependency_.get(index); - } - /** - *
-       * Indexes of the public imported files in the dependency list above.
-       * 
- * - * repeated int32 public_dependency = 10; - */ - public Builder setPublicDependency( - int index, int value) { - ensurePublicDependencyIsMutable(); - publicDependency_.set(index, value); - onChanged(); - return this; - } - /** - *
-       * Indexes of the public imported files in the dependency list above.
-       * 
- * - * repeated int32 public_dependency = 10; - */ - public Builder addPublicDependency(int value) { - ensurePublicDependencyIsMutable(); - publicDependency_.add(value); - onChanged(); - return this; - } - /** - *
-       * Indexes of the public imported files in the dependency list above.
-       * 
- * - * repeated int32 public_dependency = 10; - */ - public Builder addAllPublicDependency( - java.lang.Iterable values) { - ensurePublicDependencyIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, publicDependency_); - onChanged(); - return this; - } - /** - *
-       * Indexes of the public imported files in the dependency list above.
-       * 
- * - * repeated int32 public_dependency = 10; - */ - public Builder clearPublicDependency() { - publicDependency_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - onChanged(); - return this; - } - - private java.util.List weakDependency_ = java.util.Collections.emptyList(); - private void ensureWeakDependencyIsMutable() { - if (!((bitField0_ & 0x00000010) == 0x00000010)) { - weakDependency_ = new java.util.ArrayList(weakDependency_); - bitField0_ |= 0x00000010; - } - } - /** - *
-       * Indexes of the weak imported files in the dependency list.
-       * For Google-internal migration only. Do not use.
-       * 
- * - * repeated int32 weak_dependency = 11; - */ - public java.util.List - getWeakDependencyList() { - return java.util.Collections.unmodifiableList(weakDependency_); - } - /** - *
-       * Indexes of the weak imported files in the dependency list.
-       * For Google-internal migration only. Do not use.
-       * 
- * - * repeated int32 weak_dependency = 11; - */ - public int getWeakDependencyCount() { - return weakDependency_.size(); - } - /** - *
-       * Indexes of the weak imported files in the dependency list.
-       * For Google-internal migration only. Do not use.
-       * 
- * - * repeated int32 weak_dependency = 11; - */ - public int getWeakDependency(int index) { - return weakDependency_.get(index); - } - /** - *
-       * Indexes of the weak imported files in the dependency list.
-       * For Google-internal migration only. Do not use.
-       * 
- * - * repeated int32 weak_dependency = 11; - */ - public Builder setWeakDependency( - int index, int value) { - ensureWeakDependencyIsMutable(); - weakDependency_.set(index, value); - onChanged(); - return this; - } - /** - *
-       * Indexes of the weak imported files in the dependency list.
-       * For Google-internal migration only. Do not use.
-       * 
- * - * repeated int32 weak_dependency = 11; - */ - public Builder addWeakDependency(int value) { - ensureWeakDependencyIsMutable(); - weakDependency_.add(value); - onChanged(); - return this; - } - /** - *
-       * Indexes of the weak imported files in the dependency list.
-       * For Google-internal migration only. Do not use.
-       * 
- * - * repeated int32 weak_dependency = 11; - */ - public Builder addAllWeakDependency( - java.lang.Iterable values) { - ensureWeakDependencyIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, weakDependency_); - onChanged(); - return this; - } - /** - *
-       * Indexes of the weak imported files in the dependency list.
-       * For Google-internal migration only. Do not use.
-       * 
- * - * repeated int32 weak_dependency = 11; - */ - public Builder clearWeakDependency() { - weakDependency_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000010); - onChanged(); - return this; - } - - private java.util.List messageType_ = - java.util.Collections.emptyList(); - private void ensureMessageTypeIsMutable() { - if (!((bitField0_ & 0x00000020) == 0x00000020)) { - messageType_ = new java.util.ArrayList(messageType_); - bitField0_ |= 0x00000020; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> messageTypeBuilder_; - - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public java.util.List getMessageTypeList() { - if (messageTypeBuilder_ == null) { - return java.util.Collections.unmodifiableList(messageType_); - } else { - return messageTypeBuilder_.getMessageList(); - } - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public int getMessageTypeCount() { - if (messageTypeBuilder_ == null) { - return messageType_.size(); - } else { - return messageTypeBuilder_.getCount(); - } - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getMessageType(int index) { - if (messageTypeBuilder_ == null) { - return messageType_.get(index); - } else { - return messageTypeBuilder_.getMessage(index); - } - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public Builder setMessageType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto value) { - if (messageTypeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMessageTypeIsMutable(); - messageType_.set(index, value); - onChanged(); - } else { - messageTypeBuilder_.setMessage(index, value); - } - return this; - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public Builder setMessageType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder builderForValue) { - if (messageTypeBuilder_ == null) { - ensureMessageTypeIsMutable(); - messageType_.set(index, builderForValue.build()); - onChanged(); - } else { - messageTypeBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public Builder addMessageType(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto value) { - if (messageTypeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMessageTypeIsMutable(); - messageType_.add(value); - onChanged(); - } else { - messageTypeBuilder_.addMessage(value); - } - return this; - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public Builder addMessageType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto value) { - if (messageTypeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMessageTypeIsMutable(); - messageType_.add(index, value); - onChanged(); - } else { - messageTypeBuilder_.addMessage(index, value); - } - return this; - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public Builder addMessageType( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder builderForValue) { - if (messageTypeBuilder_ == null) { - ensureMessageTypeIsMutable(); - messageType_.add(builderForValue.build()); - onChanged(); - } else { - messageTypeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public Builder addMessageType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder builderForValue) { - if (messageTypeBuilder_ == null) { - ensureMessageTypeIsMutable(); - messageType_.add(index, builderForValue.build()); - onChanged(); - } else { - messageTypeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public Builder addAllMessageType( - java.lang.Iterable values) { - if (messageTypeBuilder_ == null) { - ensureMessageTypeIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, messageType_); - onChanged(); - } else { - messageTypeBuilder_.addAllMessages(values); - } - return this; - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public Builder clearMessageType() { - if (messageTypeBuilder_ == null) { - messageType_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000020); - onChanged(); - } else { - messageTypeBuilder_.clear(); - } - return this; - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public Builder removeMessageType(int index) { - if (messageTypeBuilder_ == null) { - ensureMessageTypeIsMutable(); - messageType_.remove(index); - onChanged(); - } else { - messageTypeBuilder_.remove(index); - } - return this; - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder getMessageTypeBuilder( - int index) { - return getMessageTypeFieldBuilder().getBuilder(index); - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder getMessageTypeOrBuilder( - int index) { - if (messageTypeBuilder_ == null) { - return messageType_.get(index); } else { - return messageTypeBuilder_.getMessageOrBuilder(index); - } - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public java.util.List - getMessageTypeOrBuilderList() { - if (messageTypeBuilder_ != null) { - return messageTypeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(messageType_); - } - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder addMessageTypeBuilder() { - return getMessageTypeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.getDefaultInstance()); - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder addMessageTypeBuilder( - int index) { - return getMessageTypeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.getDefaultInstance()); - } - /** - *
-       * All top-level definitions in this file.
-       * 
- * - * repeated .google.protobuf.DescriptorProto message_type = 4; - */ - public java.util.List - getMessageTypeBuilderList() { - return getMessageTypeFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> - getMessageTypeFieldBuilder() { - if (messageTypeBuilder_ == null) { - messageTypeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder>( - messageType_, - ((bitField0_ & 0x00000020) == 0x00000020), - getParentForChildren(), - isClean()); - messageType_ = null; - } - return messageTypeBuilder_; - } - - private java.util.List enumType_ = - java.util.Collections.emptyList(); - private void ensureEnumTypeIsMutable() { - if (!((bitField0_ & 0x00000040) == 0x00000040)) { - enumType_ = new java.util.ArrayList(enumType_); - bitField0_ |= 0x00000040; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> enumTypeBuilder_; - - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public java.util.List getEnumTypeList() { - if (enumTypeBuilder_ == null) { - return java.util.Collections.unmodifiableList(enumType_); - } else { - return enumTypeBuilder_.getMessageList(); - } - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public int getEnumTypeCount() { - if (enumTypeBuilder_ == null) { - return enumType_.size(); - } else { - return enumTypeBuilder_.getCount(); - } - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getEnumType(int index) { - if (enumTypeBuilder_ == null) { - return enumType_.get(index); - } else { - return enumTypeBuilder_.getMessage(index); - } - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public Builder setEnumType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto value) { - if (enumTypeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEnumTypeIsMutable(); - enumType_.set(index, value); - onChanged(); - } else { - enumTypeBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public Builder setEnumType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder builderForValue) { - if (enumTypeBuilder_ == null) { - ensureEnumTypeIsMutable(); - enumType_.set(index, builderForValue.build()); - onChanged(); - } else { - enumTypeBuilder_.setMessage(index, builderForValue.build()); - } - return 
this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public Builder addEnumType(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto value) { - if (enumTypeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEnumTypeIsMutable(); - enumType_.add(value); - onChanged(); - } else { - enumTypeBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public Builder addEnumType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto value) { - if (enumTypeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEnumTypeIsMutable(); - enumType_.add(index, value); - onChanged(); - } else { - enumTypeBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public Builder addEnumType( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder builderForValue) { - if (enumTypeBuilder_ == null) { - ensureEnumTypeIsMutable(); - enumType_.add(builderForValue.build()); - onChanged(); - } else { - enumTypeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public Builder addEnumType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder builderForValue) { - if (enumTypeBuilder_ == null) { - ensureEnumTypeIsMutable(); - enumType_.add(index, builderForValue.build()); - onChanged(); - } else { - enumTypeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public Builder addAllEnumType( - java.lang.Iterable values) { - if (enumTypeBuilder_ == null) { - ensureEnumTypeIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, enumType_); - onChanged(); - } else { - enumTypeBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public Builder clearEnumType() { - if (enumTypeBuilder_ == null) { - enumType_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000040); - onChanged(); - } else { - enumTypeBuilder_.clear(); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public Builder removeEnumType(int index) { - if (enumTypeBuilder_ == null) { - ensureEnumTypeIsMutable(); - enumType_.remove(index); - onChanged(); - } else { - enumTypeBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder getEnumTypeBuilder( - int index) { - return getEnumTypeFieldBuilder().getBuilder(index); - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder getEnumTypeOrBuilder( - int index) { - if (enumTypeBuilder_ == null) { - return enumType_.get(index); } else { - return enumTypeBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public java.util.List - 
getEnumTypeOrBuilderList() { - if (enumTypeBuilder_ != null) { - return enumTypeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(enumType_); - } - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder addEnumTypeBuilder() { - return getEnumTypeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder addEnumTypeBuilder( - int index) { - return getEnumTypeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 5; - */ - public java.util.List - getEnumTypeBuilderList() { - return getEnumTypeFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> - getEnumTypeFieldBuilder() { - if (enumTypeBuilder_ == null) { - enumTypeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder>( - enumType_, - ((bitField0_ & 0x00000040) == 0x00000040), - getParentForChildren(), - isClean()); - enumType_ = null; - } - return enumTypeBuilder_; - } - - private java.util.List service_ = - java.util.Collections.emptyList(); - private void ensureServiceIsMutable() { - if (!((bitField0_ & 0x00000080) == 0x00000080)) { - service_ = new java.util.ArrayList(service_); - bitField0_ |= 0x00000080; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder> serviceBuilder_; - - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public java.util.List getServiceList() { - if (serviceBuilder_ == null) { - return java.util.Collections.unmodifiableList(service_); - } else { - return serviceBuilder_.getMessageList(); - } - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public int getServiceCount() { - if (serviceBuilder_ == null) { - return service_.size(); - } else { - return serviceBuilder_.getCount(); - } - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto getService(int index) { - if (serviceBuilder_ == null) { - return service_.get(index); - } else { - return 
serviceBuilder_.getMessage(index); - } - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public Builder setService( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto value) { - if (serviceBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureServiceIsMutable(); - service_.set(index, value); - onChanged(); - } else { - serviceBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public Builder setService( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder builderForValue) { - if (serviceBuilder_ == null) { - ensureServiceIsMutable(); - service_.set(index, builderForValue.build()); - onChanged(); - } else { - serviceBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public Builder addService(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto value) { - if (serviceBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureServiceIsMutable(); - service_.add(value); - onChanged(); - } else { - serviceBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public Builder addService( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto value) { - if (serviceBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureServiceIsMutable(); - service_.add(index, value); - onChanged(); - } else { - serviceBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public Builder addService( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder builderForValue) { - if (serviceBuilder_ == null) { - ensureServiceIsMutable(); - service_.add(builderForValue.build()); - onChanged(); - } else { - serviceBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public Builder addService( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder builderForValue) { - if (serviceBuilder_ == null) { - ensureServiceIsMutable(); - service_.add(index, builderForValue.build()); - onChanged(); - } else { - serviceBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public Builder addAllService( - java.lang.Iterable values) { - if (serviceBuilder_ == null) { - ensureServiceIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, service_); - onChanged(); - } else { - serviceBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public Builder clearService() { - if (serviceBuilder_ == null) { - service_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000080); - onChanged(); - } else { - serviceBuilder_.clear(); - } - return this; - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - 
public Builder removeService(int index) { - if (serviceBuilder_ == null) { - ensureServiceIsMutable(); - service_.remove(index); - onChanged(); - } else { - serviceBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder getServiceBuilder( - int index) { - return getServiceFieldBuilder().getBuilder(index); - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder getServiceOrBuilder( - int index) { - if (serviceBuilder_ == null) { - return service_.get(index); } else { - return serviceBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public java.util.List - getServiceOrBuilderList() { - if (serviceBuilder_ != null) { - return serviceBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(service_); - } - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder addServiceBuilder() { - return getServiceFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder addServiceBuilder( - int index) { - return getServiceFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.ServiceDescriptorProto service = 6; - */ - public java.util.List - getServiceBuilderList() { - return getServiceFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder> - getServiceFieldBuilder() { - if (serviceBuilder_ == null) { - serviceBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProtoOrBuilder>( - service_, - ((bitField0_ & 0x00000080) == 0x00000080), - getParentForChildren(), - isClean()); - service_ = null; - } - return serviceBuilder_; - } - - private java.util.List extension_ = - java.util.Collections.emptyList(); - private void ensureExtensionIsMutable() { - if (!((bitField0_ & 0x00000100) == 0x00000100)) { - extension_ = new java.util.ArrayList(extension_); - bitField0_ |= 0x00000100; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> extensionBuilder_; - - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public java.util.List getExtensionList() { - if (extensionBuilder_ == null) { - return java.util.Collections.unmodifiableList(extension_); - } else { - return extensionBuilder_.getMessageList(); - } - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public int getExtensionCount() { - if (extensionBuilder_ == null) { - return extension_.size(); - } else { - return extensionBuilder_.getCount(); - } - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getExtension(int index) { - if (extensionBuilder_ == null) { - return extension_.get(index); - } else { - return extensionBuilder_.getMessage(index); - } - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public Builder setExtension( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { - if (extensionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureExtensionIsMutable(); - extension_.set(index, value); - onChanged(); - } else { - extensionBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public Builder setExtension( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { - if (extensionBuilder_ == null) { - ensureExtensionIsMutable(); - extension_.set(index, builderForValue.build()); - onChanged(); - } else { - extensionBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public Builder addExtension(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { - if (extensionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureExtensionIsMutable(); - extension_.add(value); - onChanged(); - } else { - extensionBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public Builder addExtension( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { - if (extensionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureExtensionIsMutable(); - extension_.add(index, value); - onChanged(); - } else { - extensionBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public Builder addExtension( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { - if (extensionBuilder_ == null) { - ensureExtensionIsMutable(); - extension_.add(builderForValue.build()); - onChanged(); - } else { - extensionBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public Builder addExtension( - int index, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { - if (extensionBuilder_ == null) { - ensureExtensionIsMutable(); - extension_.add(index, builderForValue.build()); - onChanged(); - } else { - extensionBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public Builder addAllExtension( - java.lang.Iterable values) { - if (extensionBuilder_ == null) { - ensureExtensionIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, extension_); - onChanged(); - } else { - extensionBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public Builder clearExtension() { - if (extensionBuilder_ == null) { - extension_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000100); - onChanged(); - } else { - extensionBuilder_.clear(); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public Builder removeExtension(int index) { - if (extensionBuilder_ == null) { - ensureExtensionIsMutable(); - extension_.remove(index); - onChanged(); - } else { - extensionBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder getExtensionBuilder( - int index) { - return getExtensionFieldBuilder().getBuilder(index); - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getExtensionOrBuilder( - int index) { - if (extensionBuilder_ == null) { - return extension_.get(index); } else { - return extensionBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public java.util.List - getExtensionOrBuilderList() { - if (extensionBuilder_ != null) { - return extensionBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(extension_); - } - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder addExtensionBuilder() { - return getExtensionFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder addExtensionBuilder( - int index) { - return getExtensionFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 7; - */ - public java.util.List - getExtensionBuilderList() { - return getExtensionFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> - getExtensionFieldBuilder() { - if (extensionBuilder_ == null) { - extensionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder>( - extension_, - ((bitField0_ & 0x00000100) == 0x00000100), - getParentForChildren(), - isClean()); - extension_ = null; - } - return extensionBuilder_; - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions options_ = null; - private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder> optionsBuilder_; - /** - * optional .google.protobuf.FileOptions options = 8; - */ - public boolean hasOptions() { - return ((bitField0_ & 0x00000200) == 0x00000200); - } - /** - * optional .google.protobuf.FileOptions options = 8; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions getOptions() { - if (optionsBuilder_ == null) { - return options_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance() : options_; - } else { - return optionsBuilder_.getMessage(); - } - } - /** - * optional .google.protobuf.FileOptions options = 8; - */ - public Builder setOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions value) { - if (optionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - options_ = value; - onChanged(); - } else { - optionsBuilder_.setMessage(value); - } - bitField0_ |= 0x00000200; - return this; - } - /** - * optional .google.protobuf.FileOptions options = 8; - */ - public Builder setOptions( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder builderForValue) { - if (optionsBuilder_ == null) { - options_ = builderForValue.build(); - onChanged(); - } else { - optionsBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000200; - return this; - } - /** - * optional .google.protobuf.FileOptions options = 8; - */ - public Builder mergeOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions value) { - if (optionsBuilder_ == null) { - if (((bitField0_ & 0x00000200) == 0x00000200) && - options_ != null && - options_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance()) { - options_ = - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.newBuilder(options_).mergeFrom(value).buildPartial(); - } else { - options_ = value; - } - onChanged(); - } else { - optionsBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000200; - return this; - } - /** - * optional .google.protobuf.FileOptions options = 8; - */ - public Builder clearOptions() { - if (optionsBuilder_ == null) { - options_ = null; - onChanged(); - } else { - optionsBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000200); - return this; - } - /** - * optional 
.google.protobuf.FileOptions options = 8; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder getOptionsBuilder() { - bitField0_ |= 0x00000200; - onChanged(); - return getOptionsFieldBuilder().getBuilder(); - } - /** - * optional .google.protobuf.FileOptions options = 8; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder getOptionsOrBuilder() { - if (optionsBuilder_ != null) { - return optionsBuilder_.getMessageOrBuilder(); - } else { - return options_ == null ? - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.getDefaultInstance() : options_; - } - } - /** - * optional .google.protobuf.FileOptions options = 8; - */ - private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder> - getOptionsFieldBuilder() { - if (optionsBuilder_ == null) { - optionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptionsOrBuilder>( - getOptions(), - getParentForChildren(), - isClean()); - options_ = null; - } - return optionsBuilder_; - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo sourceCodeInfo_ = null; - private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder> sourceCodeInfoBuilder_; - /** - *
-       * This field contains optional information about the original source code.
-       * You may safely remove this entire field without harming runtime
-       * functionality of the descriptors -- the information is needed only by
-       * development tools.
-       * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - public boolean hasSourceCodeInfo() { - return ((bitField0_ & 0x00000400) == 0x00000400); - } - /** - *
-       * This field contains optional information about the original source code.
-       * You may safely remove this entire field without harming runtime
-       * functionality of the descriptors -- the information is needed only by
-       * development tools.
-       * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo getSourceCodeInfo() { - if (sourceCodeInfoBuilder_ == null) { - return sourceCodeInfo_ == null ? org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.getDefaultInstance() : sourceCodeInfo_; - } else { - return sourceCodeInfoBuilder_.getMessage(); - } - } - /** - *
-       * This field contains optional information about the original source code.
-       * You may safely remove this entire field without harming runtime
-       * functionality of the descriptors -- the information is needed only by
-       * development tools.
-       * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - public Builder setSourceCodeInfo(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo value) { - if (sourceCodeInfoBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - sourceCodeInfo_ = value; - onChanged(); - } else { - sourceCodeInfoBuilder_.setMessage(value); - } - bitField0_ |= 0x00000400; - return this; - } - /** - *
-       * This field contains optional information about the original source code.
-       * You may safely remove this entire field without harming runtime
-       * functionality of the descriptors -- the information is needed only by
-       * development tools.
-       * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - public Builder setSourceCodeInfo( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder builderForValue) { - if (sourceCodeInfoBuilder_ == null) { - sourceCodeInfo_ = builderForValue.build(); - onChanged(); - } else { - sourceCodeInfoBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000400; - return this; - } - /** - *
-       * This field contains optional information about the original source code.
-       * You may safely remove this entire field without harming runtime
-       * functionality of the descriptors -- the information is needed only by
-       * development tools.
-       * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - public Builder mergeSourceCodeInfo(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo value) { - if (sourceCodeInfoBuilder_ == null) { - if (((bitField0_ & 0x00000400) == 0x00000400) && - sourceCodeInfo_ != null && - sourceCodeInfo_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.getDefaultInstance()) { - sourceCodeInfo_ = - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.newBuilder(sourceCodeInfo_).mergeFrom(value).buildPartial(); - } else { - sourceCodeInfo_ = value; - } - onChanged(); - } else { - sourceCodeInfoBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000400; - return this; - } - /** - *
-       * This field contains optional information about the original source code.
-       * You may safely remove this entire field without harming runtime
-       * functionality of the descriptors -- the information is needed only by
-       * development tools.
-       * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - public Builder clearSourceCodeInfo() { - if (sourceCodeInfoBuilder_ == null) { - sourceCodeInfo_ = null; - onChanged(); - } else { - sourceCodeInfoBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000400); - return this; - } - /** - *
-       * This field contains optional information about the original source code.
-       * You may safely remove this entire field without harming runtime
-       * functionality of the descriptors -- the information is needed only by
-       * development tools.
-       * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder getSourceCodeInfoBuilder() { - bitField0_ |= 0x00000400; - onChanged(); - return getSourceCodeInfoFieldBuilder().getBuilder(); - } - /** - *
-       * This field contains optional information about the original source code.
-       * You may safely remove this entire field without harming runtime
-       * functionality of the descriptors -- the information is needed only by
-       * development tools.
-       * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder getSourceCodeInfoOrBuilder() { - if (sourceCodeInfoBuilder_ != null) { - return sourceCodeInfoBuilder_.getMessageOrBuilder(); - } else { - return sourceCodeInfo_ == null ? - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.getDefaultInstance() : sourceCodeInfo_; - } - } - /** - *
-       * This field contains optional information about the original source code.
-       * You may safely remove this entire field without harming runtime
-       * functionality of the descriptors -- the information is needed only by
-       * development tools.
-       * 
- * - * optional .google.protobuf.SourceCodeInfo source_code_info = 9; - */ - private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder> - getSourceCodeInfoFieldBuilder() { - if (sourceCodeInfoBuilder_ == null) { - sourceCodeInfoBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfoOrBuilder>( - getSourceCodeInfo(), - getParentForChildren(), - isClean()); - sourceCodeInfo_ = null; - } - return sourceCodeInfoBuilder_; - } - - private java.lang.Object syntax_ = ""; - /** - *
-       * The syntax of the proto file.
-       * The supported values are "proto2" and "proto3".
-       * 
- * - * optional string syntax = 12; - */ - public boolean hasSyntax() { - return ((bitField0_ & 0x00000800) == 0x00000800); - } - /** - *
-       * The syntax of the proto file.
-       * The supported values are "proto2" and "proto3".
-       * 
- * - * optional string syntax = 12; - */ - public java.lang.String getSyntax() { - java.lang.Object ref = syntax_; - if (!(ref instanceof java.lang.String)) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = - (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - syntax_ = s; - } - return s; - } else { - return (java.lang.String) ref; - } - } - /** - *
-       * The syntax of the proto file.
-       * The supported values are "proto2" and "proto3".
-       * 
- * - * optional string syntax = 12; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getSyntaxBytes() { - java.lang.Object ref = syntax_; - if (ref instanceof String) { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - syntax_ = b; - return b; - } else { - return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; - } - } - /** - *
-       * The syntax of the proto file.
-       * The supported values are "proto2" and "proto3".
-       * 
- * - * optional string syntax = 12; - */ - public Builder setSyntax( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000800; - syntax_ = value; - onChanged(); - return this; - } - /** - *
-       * The syntax of the proto file.
-       * The supported values are "proto2" and "proto3".
-       * 
- * - * optional string syntax = 12; - */ - public Builder clearSyntax() { - bitField0_ = (bitField0_ & ~0x00000800); - syntax_ = getDefaultInstance().getSyntax(); - onChanged(); - return this; - } - /** - *
-       * The syntax of the proto file.
-       * The supported values are "proto2" and "proto3".
-       * 
- * - * optional string syntax = 12; - */ - public Builder setSyntaxBytes( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000800; - syntax_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - public final Builder mergeUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:google.protobuf.FileDescriptorProto) - } - - // @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorProto) - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto(); - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser - PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { - public FileDescriptorProto parsePartialFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return new FileDescriptorProto(input, extensionRegistry); - } - }; - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface DescriptorProtoOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.protobuf.DescriptorProto) - org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { - - /** - * optional string name = 1; - */ - boolean hasName(); - /** - * optional string name = 1; - */ - java.lang.String getName(); - /** - * optional string name = 1; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getNameBytes(); - - /** - * repeated .google.protobuf.FieldDescriptorProto field = 2; - */ - java.util.List - getFieldList(); - /** - * repeated .google.protobuf.FieldDescriptorProto field = 2; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getField(int index); - /** - * repeated .google.protobuf.FieldDescriptorProto field = 2; - */ - int getFieldCount(); - /** - * repeated .google.protobuf.FieldDescriptorProto field = 2; - */ - java.util.List - getFieldOrBuilderList(); - /** - * repeated .google.protobuf.FieldDescriptorProto field = 2; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getFieldOrBuilder( - int index); - - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - java.util.List - getExtensionList(); - /** - * repeated 
.google.protobuf.FieldDescriptorProto extension = 6; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getExtension(int index); - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - int getExtensionCount(); - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - java.util.List - getExtensionOrBuilderList(); - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getExtensionOrBuilder( - int index); - - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - java.util.List - getNestedTypeList(); - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getNestedType(int index); - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - int getNestedTypeCount(); - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - java.util.List - getNestedTypeOrBuilderList(); - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder getNestedTypeOrBuilder( - int index); - - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - java.util.List - getEnumTypeList(); - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getEnumType(int index); - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - int getEnumTypeCount(); - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - java.util.List - getEnumTypeOrBuilderList(); - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder getEnumTypeOrBuilder( - int index); - - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - java.util.List - getExtensionRangeList(); - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange getExtensionRange(int index); - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - int getExtensionRangeCount(); - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - java.util.List - getExtensionRangeOrBuilderList(); - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder getExtensionRangeOrBuilder( - int index); - - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - java.util.List - getOneofDeclList(); - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto getOneofDecl(int index); - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - int getOneofDeclCount(); - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - java.util.List - getOneofDeclOrBuilderList(); - /** - * repeated 
.google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder getOneofDeclOrBuilder( - int index); - - /** - * optional .google.protobuf.MessageOptions options = 7; - */ - boolean hasOptions(); - /** - * optional .google.protobuf.MessageOptions options = 7; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions getOptions(); - /** - * optional .google.protobuf.MessageOptions options = 7; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptionsOrBuilder getOptionsOrBuilder(); - - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - java.util.List - getReservedRangeList(); - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange getReservedRange(int index); - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - int getReservedRangeCount(); - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - java.util.List - getReservedRangeOrBuilderList(); - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder getReservedRangeOrBuilder( - int index); - - /** - *
-     * Reserved field names, which may not be used by fields in the same message.
-     * A given name may only be reserved once.
-     * 
- * - * repeated string reserved_name = 10; - */ - java.util.List - getReservedNameList(); - /** - *
-     * Reserved field names, which may not be used by fields in the same message.
-     * A given name may only be reserved once.
-     * 
- * - * repeated string reserved_name = 10; - */ - int getReservedNameCount(); - /** - *
-     * Reserved field names, which may not be used by fields in the same message.
-     * A given name may only be reserved once.
-     * 
- * - * repeated string reserved_name = 10; - */ - java.lang.String getReservedName(int index); - /** - *
-     * Reserved field names, which may not be used by fields in the same message.
-     * A given name may only be reserved once.
-     * 
- * - * repeated string reserved_name = 10; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getReservedNameBytes(int index); - } - /** - *
-   * Describes a message type.
-   * 
- * - * Protobuf type {@code google.protobuf.DescriptorProto} - */ - public static final class DescriptorProto extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:google.protobuf.DescriptorProto) - DescriptorProtoOrBuilder { - // Use DescriptorProto.newBuilder() to construct. - private DescriptorProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private DescriptorProto() { - name_ = ""; - field_ = java.util.Collections.emptyList(); - extension_ = java.util.Collections.emptyList(); - nestedType_ = java.util.Collections.emptyList(); - enumType_ = java.util.Collections.emptyList(); - extensionRange_ = java.util.Collections.emptyList(); - oneofDecl_ = java.util.Collections.emptyList(); - reservedRange_ = java.util.Collections.emptyList(); - reservedName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; - } - - @java.lang.Override - public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private DescriptorProto( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - this(); - int mutable_bitField0_ = 0; - org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = - org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); - bitField0_ |= 0x00000001; - name_ = bs; - break; - } - case 18: { - if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { - field_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000002; - } - field_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.PARSER, extensionRegistry)); - break; - } - case 26: { - if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { - nestedType_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000008; - } - nestedType_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.PARSER, extensionRegistry)); - break; - } - case 34: { - if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { - enumType_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000010; - } - enumType_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.PARSER, extensionRegistry)); - break; - } - case 42: { - if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { - extensionRange_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000020; - } - extensionRange_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.PARSER, extensionRegistry)); - break; - } - case 50: { - if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - extension_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000004; - } - extension_.add( - 
input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.PARSER, extensionRegistry)); - break; - } - case 58: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder subBuilder = null; - if (((bitField0_ & 0x00000002) == 0x00000002)) { - subBuilder = options_.toBuilder(); - } - options_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(options_); - options_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000002; - break; - } - case 66: { - if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { - oneofDecl_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000040; - } - oneofDecl_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.PARSER, extensionRegistry)); - break; - } - case 74: { - if (!((mutable_bitField0_ & 0x00000100) == 0x00000100)) { - reservedRange_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000100; - } - reservedRange_.add( - input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.PARSER, extensionRegistry)); - break; - } - case 82: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); - if (!((mutable_bitField0_ & 0x00000200) == 0x00000200)) { - reservedName_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000200; - } - reservedName_.add(bs); - break; - } - } - } - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { - field_ = java.util.Collections.unmodifiableList(field_); - } - if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { - nestedType_ = java.util.Collections.unmodifiableList(nestedType_); - } - if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { - enumType_ = java.util.Collections.unmodifiableList(enumType_); - } - if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { - extensionRange_ = java.util.Collections.unmodifiableList(extensionRange_); - } - if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - extension_ = java.util.Collections.unmodifiableList(extension_); - } - if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { - oneofDecl_ = java.util.Collections.unmodifiableList(oneofDecl_); - } - if (((mutable_bitField0_ & 0x00000100) == 0x00000100)) { - reservedRange_ = java.util.Collections.unmodifiableList(reservedRange_); - } - if (((mutable_bitField0_ & 0x00000200) == 0x00000200)) { - reservedName_ = reservedName_.getUnmodifiableView(); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder.class); - } - - public interface ExtensionRangeOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.protobuf.DescriptorProto.ExtensionRange) - org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { - - /** - * optional int32 start = 1; - */ - boolean hasStart(); - /** - * optional int32 start = 1; - */ - int getStart(); - - /** - * optional int32 end = 2; - */ - boolean hasEnd(); - /** - * optional int32 end = 2; - */ - int getEnd(); - } - /** - * Protobuf type {@code google.protobuf.DescriptorProto.ExtensionRange} - */ - public static final class ExtensionRange extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:google.protobuf.DescriptorProto.ExtensionRange) - ExtensionRangeOrBuilder { - // Use ExtensionRange.newBuilder() to construct. - private ExtensionRange(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private ExtensionRange() { - start_ = 0; - end_ = 0; - } - - @java.lang.Override - public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ExtensionRange( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - this(); - int mutable_bitField0_ = 0; - org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = - org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - start_ = input.readInt32(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - end_ = input.readInt32(); - break; - } - } - } - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ExtensionRange_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ExtensionRange_fieldAccessorTable - .ensureFieldAccessorsInitialized( - 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder.class); - } - - private int bitField0_; - public static final int START_FIELD_NUMBER = 1; - private int start_; - /** - * optional int32 start = 1; - */ - public boolean hasStart() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional int32 start = 1; - */ - public int getStart() { - return start_; - } - - public static final int END_FIELD_NUMBER = 2; - private int end_; - /** - * optional int32 end = 2; - */ - public boolean hasEnd() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional int32 end = 2; - */ - public int getEnd() { - return end_; - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt32(1, start_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeInt32(2, end_); - } - unknownFields.writeTo(output); - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeInt32Size(1, start_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream - .computeInt32Size(2, end_); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange) obj; - - boolean result = true; - result = result && (hasStart() == other.hasStart()); - if (hasStart()) { - result = result && (getStart() - == other.getStart()); - } - result = result && (hasEnd() == other.hasEnd()); - if (hasEnd()) { - result = result && (getEnd() - == other.getEnd()); - } - result = result && unknownFields.equals(other.unknownFields); - return result; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasStart()) { - hash = (37 * hash) + START_FIELD_NUMBER; - hash = (53 * hash) + getStart(); - } - if (hasEnd()) { - hash = (37 * hash) + END_FIELD_NUMBER; - hash = (53 * hash) + getEnd(); - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) - throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom(byte[] data) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( - byte[] data, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseDelimitedFrom( - java.io.InputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - public Builder 
newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.protobuf.DescriptorProto.ExtensionRange} - */ - public static final class Builder extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:google.protobuf.DescriptorProto.ExtensionRange) - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder { - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ExtensionRange_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ExtensionRange_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - start_ = 0; - bitField0_ = (bitField0_ & ~0x00000001); - end_ = 0; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_DescriptorProto_ExtensionRange_descriptor; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange getDefaultInstanceForType() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange build() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - 
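[Editor's illustrative sketch, not part of the patch: the removed file above defines the generated ExtensionRange message and its Builder; once these classes are generated at build time instead of checked in, they are used exactly as before. The class and method names below (newBuilder, setStart, setEnd, build, parseFrom, getStart, getEnd) are taken from the removed source; the values and the wrapper class name are made up for illustration.]

// Hypothetical usage sketch of the generated (shaded) ExtensionRange API.
import org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange;

public class ExtensionRangeUsageSketch {
  public static void main(String[] args) throws Exception {
    // Build a message through the generated Builder
    // (optional int32 start = 1; optional int32 end = 2).
    ExtensionRange range = ExtensionRange.newBuilder()
        .setStart(1000)
        .setEnd(2000)
        .build();

    // Round-trip through the wire format using the generated parser.
    ExtensionRange parsed = ExtensionRange.parseFrom(range.toByteArray());
    System.out.println(parsed.getStart() + ".." + parsed.getEnd()); // prints 1000..2000
  }
}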
public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange buildPartial() { - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange result = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.start_ = start_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.end_ = end_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder clone() { - return (Builder) super.clone(); - } - public Builder setField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.setField(field, value); - } - public Builder clearField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { - return (Builder) super.clearField(field); - } - public Builder clearOneof( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return (Builder) super.clearOneof(oneof); - } - public Builder setRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - int index, Object value) { - return (Builder) super.setRepeatedField(field, index, value); - } - public Builder addRepeatedField( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, - Object value) { - return (Builder) super.addRepeatedField(field, value); - } - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange) { - return mergeFrom((org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange other) { - if (other == org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.getDefaultInstance()) return this; - if (other.hasStart()) { - setStart(other.getStart()); - } - if (other.hasEnd()) { - setEnd(other.getEnd()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private int start_ ; - /** - * optional int32 start = 
1; - */ - public boolean hasStart() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional int32 start = 1; - */ - public int getStart() { - return start_; - } - /** - * optional int32 start = 1; - */ - public Builder setStart(int value) { - bitField0_ |= 0x00000001; - start_ = value; - onChanged(); - return this; - } - /** - * optional int32 start = 1; - */ - public Builder clearStart() { - bitField0_ = (bitField0_ & ~0x00000001); - start_ = 0; - onChanged(); - return this; - } - - private int end_ ; - /** - * optional int32 end = 2; - */ - public boolean hasEnd() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional int32 end = 2; - */ - public int getEnd() { - return end_; - } - /** - * optional int32 end = 2; - */ - public Builder setEnd(int value) { - bitField0_ |= 0x00000002; - end_ = value; - onChanged(); - return this; - } - /** - * optional int32 end = 2; - */ - public Builder clearEnd() { - bitField0_ = (bitField0_ & ~0x00000002); - end_ = 0; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - public final Builder mergeUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:google.protobuf.DescriptorProto.ExtensionRange) - } - - // @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto.ExtensionRange) - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange(); - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser - PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { - public ExtensionRange parsePartialFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return new ExtensionRange(input, extensionRegistry); - } - }; - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface ReservedRangeOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.protobuf.DescriptorProto.ReservedRange) - org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { - - /** - *
-       * Inclusive.
-       * 
- * - * optional int32 start = 1; - */ - boolean hasStart(); - /** - *
-       * Inclusive.
-       * 
- * - * optional int32 start = 1; - */ - int getStart(); - - /** - *
-       * Exclusive.
-       * 
- * - * optional int32 end = 2; - */ - boolean hasEnd(); - /** - *
-       * Exclusive.
-       * 
- * - * optional int32 end = 2; - */ - int getEnd(); - } - /** - *
-     * Range of reserved tag numbers. Reserved tag numbers may not be used by
-     * fields or extension ranges in the same message. Reserved ranges may
-     * not overlap.
-     * 
[elided generated code: the deleted DescriptorProto.ReservedRange message class (Protobuf type google.protobuf.DescriptorProto.ReservedRange) — parsing constructor, descriptor and field-accessor plumbing, hasStart()/getStart() for the inclusive start and hasEnd()/getEnd() for the exclusive end, isInitialized(), writeTo(), getSerializedSize(), equals(), hashCode(), the parseFrom()/parseDelimitedFrom() overloads, newBuilder()/toBuilder(), and the repeated class comment introducing its Builder.]
[elided generated code: ReservedRange.Builder — clear(), build()/buildPartial(), clone(), the generic setField()/clearField()/setRepeatedField() overrides, mergeFrom() for Message and CodedInputStream, setStart()/clearStart(), setEnd()/clearEnd(), unknown-field handling, and the deprecated PARSER and DEFAULT_INSTANCE singletons that close out ReservedRange; followed by the start of the outer DescriptorProto members: the optional string name accessors and the first of the repeated FieldDescriptorProto field accessors.]
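For orientation, a minimal sketch of how the inclusive-start/exclusive-end convention of the deleted ReservedRange type behaves, assuming the shaded generated classes are still available on the classpath (after this change they are produced during the build rather than checked in); only methods that appear in the deleted code above are used:

import org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange;

public class ReservedRangeSketch {
  public static void main(String[] args) {
    // Reserve tag numbers 5 through 9: start is inclusive, end is exclusive.
    ReservedRange range = ReservedRange.newBuilder()
        .setStart(5)
        .setEnd(10)
        .build();

    // hasStart()/hasEnd() report the presence bits the builder set in bitField0_.
    System.out.println(range.hasStart() && range.hasEnd());                              // true
    System.out.println("reserved [" + range.getStart() + ", " + range.getEnd() + ")");   // reserved [5, 10)
  }
}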
[elided generated code: DescriptorProto accessors for the repeated field, extension, nested_type, enum_type, extension_range and oneof_decl messages, the optional MessageOptions options field, and the repeated ReservedRange reserved_range field.]
[elided generated code: the repeated string reserved_name accessors (per the generated comment, reserved field names may not be used by fields in the same message, and a given name may only be reserved once), followed by DescriptorProto's isInitialized(), writeTo(), getSerializedSize(), equals(), hashCode(), its parseFrom()/parseDelimitedFrom() overloads, and newBuilder()/toBuilder().]
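A small round-trip sketch for the deleted parseFrom() overloads, assuming the usual generated setName() setter (it appears further down in the deleted Builder) and the toByteArray() helper inherited from the shaded GeneratedMessageV3 base class:

import org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;

public class DescriptorRoundTripSketch {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // "RowMeta" is a made-up message name used purely for illustration.
    DescriptorProto proto = DescriptorProto.newBuilder()
        .setName("RowMeta")
        .build();

    byte[] wire = proto.toByteArray();                          // serialize (inherited helper, assumed)
    DescriptorProto parsed = DescriptorProto.parseFrom(wire);   // one of the deleted parseFrom overloads

    System.out.println(parsed.hasName() + " " + parsed.getName()); // true RowMeta
  }
}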
[elided generated code: DescriptorProto.Builder (class comment: "Describes a message type.") — clear(), build()/buildPartial(), a mergeFrom() that overwrites the singular name and options fields and concatenates every repeated field, isInitialized(), mergeFrom(CodedInputStream), the name setters, and the repeated FieldDescriptorProto field accessors (setField/addField/addAllField/clearField/removeField and the *OrBuilder views).]
.google.protobuf.FieldDescriptorProto field = 2; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder addFieldBuilder() { - return getFieldFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.FieldDescriptorProto field = 2; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder addFieldBuilder( - int index) { - return getFieldFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.FieldDescriptorProto field = 2; - */ - public java.util.List - getFieldBuilderList() { - return getFieldFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> - getFieldFieldBuilder() { - if (fieldBuilder_ == null) { - fieldBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder>( - field_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - field_ = null; - } - return fieldBuilder_; - } - - private java.util.List extension_ = - java.util.Collections.emptyList(); - private void ensureExtensionIsMutable() { - if (!((bitField0_ & 0x00000004) == 0x00000004)) { - extension_ = new java.util.ArrayList(extension_); - bitField0_ |= 0x00000004; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> extensionBuilder_; - - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public java.util.List getExtensionList() { - if (extensionBuilder_ == null) { - return java.util.Collections.unmodifiableList(extension_); - } else { - return extensionBuilder_.getMessageList(); - } - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public int getExtensionCount() { - if (extensionBuilder_ == null) { - return extension_.size(); - } else { - return extensionBuilder_.getCount(); - } - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto getExtension(int index) { - if (extensionBuilder_ == null) { - return extension_.get(index); - } else { - return extensionBuilder_.getMessage(index); - } - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public Builder setExtension( - int index, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { - if (extensionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureExtensionIsMutable(); - extension_.set(index, value); - onChanged(); - } else { - extensionBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public Builder setExtension( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { - if (extensionBuilder_ == null) { - ensureExtensionIsMutable(); - extension_.set(index, builderForValue.build()); - onChanged(); - } else { - extensionBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public Builder addExtension(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { - if (extensionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureExtensionIsMutable(); - extension_.add(value); - onChanged(); - } else { - extensionBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public Builder addExtension( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto value) { - if (extensionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureExtensionIsMutable(); - extension_.add(index, value); - onChanged(); - } else { - extensionBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public Builder addExtension( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { - if (extensionBuilder_ == null) { - ensureExtensionIsMutable(); - extension_.add(builderForValue.build()); - onChanged(); - } else { - extensionBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public Builder addExtension( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder builderForValue) { - if (extensionBuilder_ == null) { - ensureExtensionIsMutable(); - extension_.add(index, builderForValue.build()); - onChanged(); - } else { - extensionBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public Builder addAllExtension( - java.lang.Iterable values) { - if (extensionBuilder_ == null) { - ensureExtensionIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, extension_); - onChanged(); - } else { - extensionBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public Builder clearExtension() { - if (extensionBuilder_ == null) { - extension_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000004); - onChanged(); - } else { - extensionBuilder_.clear(); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public Builder removeExtension(int index) { - if (extensionBuilder_ == null) { - 
ensureExtensionIsMutable(); - extension_.remove(index); - onChanged(); - } else { - extensionBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder getExtensionBuilder( - int index) { - return getExtensionFieldBuilder().getBuilder(index); - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder getExtensionOrBuilder( - int index) { - if (extensionBuilder_ == null) { - return extension_.get(index); } else { - return extensionBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public java.util.List - getExtensionOrBuilderList() { - if (extensionBuilder_ != null) { - return extensionBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(extension_); - } - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder addExtensionBuilder() { - return getExtensionFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder addExtensionBuilder( - int index) { - return getExtensionFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.FieldDescriptorProto extension = 6; - */ - public java.util.List - getExtensionBuilderList() { - return getExtensionFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder> - getExtensionFieldBuilder() { - if (extensionBuilder_ == null) { - extensionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProtoOrBuilder>( - extension_, - ((bitField0_ & 0x00000004) == 0x00000004), - getParentForChildren(), - isClean()); - extension_ = null; - } - return extensionBuilder_; - } - - private java.util.List nestedType_ = - java.util.Collections.emptyList(); - private void ensureNestedTypeIsMutable() { - if (!((bitField0_ & 0x00000008) == 0x00000008)) { - nestedType_ = new java.util.ArrayList(nestedType_); - bitField0_ |= 0x00000008; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> nestedTypeBuilder_; - - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public java.util.List getNestedTypeList() { - if (nestedTypeBuilder_ == null) { - return java.util.Collections.unmodifiableList(nestedType_); - } else { - return nestedTypeBuilder_.getMessageList(); - } - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public int getNestedTypeCount() { - if (nestedTypeBuilder_ == null) { - return nestedType_.size(); - } else { - return nestedTypeBuilder_.getCount(); - } - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getNestedType(int index) { - if (nestedTypeBuilder_ == null) { - return nestedType_.get(index); - } else { - return nestedTypeBuilder_.getMessage(index); - } - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public Builder setNestedType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto value) { - if (nestedTypeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureNestedTypeIsMutable(); - nestedType_.set(index, value); - onChanged(); - } else { - nestedTypeBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public Builder setNestedType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder builderForValue) { - if (nestedTypeBuilder_ == null) { - ensureNestedTypeIsMutable(); - nestedType_.set(index, builderForValue.build()); - onChanged(); - } else { - nestedTypeBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public Builder addNestedType(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto value) { - if (nestedTypeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureNestedTypeIsMutable(); - nestedType_.add(value); - onChanged(); - } else { - nestedTypeBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public Builder addNestedType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto value) { - if (nestedTypeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureNestedTypeIsMutable(); - nestedType_.add(index, value); - onChanged(); - } else { - nestedTypeBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public Builder addNestedType( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder builderForValue) { - if (nestedTypeBuilder_ == null) { - ensureNestedTypeIsMutable(); - nestedType_.add(builderForValue.build()); - onChanged(); - } else { - nestedTypeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public Builder addNestedType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder builderForValue) { - if (nestedTypeBuilder_ == null) { - ensureNestedTypeIsMutable(); - 
nestedType_.add(index, builderForValue.build()); - onChanged(); - } else { - nestedTypeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public Builder addAllNestedType( - java.lang.Iterable values) { - if (nestedTypeBuilder_ == null) { - ensureNestedTypeIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, nestedType_); - onChanged(); - } else { - nestedTypeBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public Builder clearNestedType() { - if (nestedTypeBuilder_ == null) { - nestedType_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - onChanged(); - } else { - nestedTypeBuilder_.clear(); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public Builder removeNestedType(int index) { - if (nestedTypeBuilder_ == null) { - ensureNestedTypeIsMutable(); - nestedType_.remove(index); - onChanged(); - } else { - nestedTypeBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder getNestedTypeBuilder( - int index) { - return getNestedTypeFieldBuilder().getBuilder(index); - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder getNestedTypeOrBuilder( - int index) { - if (nestedTypeBuilder_ == null) { - return nestedType_.get(index); } else { - return nestedTypeBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public java.util.List - getNestedTypeOrBuilderList() { - if (nestedTypeBuilder_ != null) { - return nestedTypeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(nestedType_); - } - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder addNestedTypeBuilder() { - return getNestedTypeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder addNestedTypeBuilder( - int index) { - return getNestedTypeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.DescriptorProto nested_type = 3; - */ - public java.util.List - getNestedTypeBuilderList() { - return getNestedTypeFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder> - getNestedTypeFieldBuilder() { - if (nestedTypeBuilder_ == null) { - nestedTypeBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProtoOrBuilder>( - nestedType_, - ((bitField0_ & 0x00000008) == 0x00000008), - getParentForChildren(), - isClean()); - nestedType_ = null; - } - return nestedTypeBuilder_; - } - - private java.util.List enumType_ = - java.util.Collections.emptyList(); - private void ensureEnumTypeIsMutable() { - if (!((bitField0_ & 0x00000010) == 0x00000010)) { - enumType_ = new java.util.ArrayList(enumType_); - bitField0_ |= 0x00000010; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> enumTypeBuilder_; - - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public java.util.List getEnumTypeList() { - if (enumTypeBuilder_ == null) { - return java.util.Collections.unmodifiableList(enumType_); - } else { - return enumTypeBuilder_.getMessageList(); - } - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public int getEnumTypeCount() { - if (enumTypeBuilder_ == null) { - return enumType_.size(); - } else { - return enumTypeBuilder_.getCount(); - } - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto getEnumType(int index) { - if (enumTypeBuilder_ == null) { - return enumType_.get(index); - } else { - return enumTypeBuilder_.getMessage(index); - } - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public Builder setEnumType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto value) { - if (enumTypeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEnumTypeIsMutable(); - enumType_.set(index, value); - onChanged(); - } else { - enumTypeBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public Builder setEnumType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder builderForValue) { - if (enumTypeBuilder_ == null) { - ensureEnumTypeIsMutable(); - enumType_.set(index, builderForValue.build()); - onChanged(); - } else { - enumTypeBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public Builder addEnumType(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto value) { - if (enumTypeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEnumTypeIsMutable(); - enumType_.add(value); - onChanged(); - } else { - enumTypeBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public Builder addEnumType( - int index, 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto value) { - if (enumTypeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEnumTypeIsMutable(); - enumType_.add(index, value); - onChanged(); - } else { - enumTypeBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public Builder addEnumType( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder builderForValue) { - if (enumTypeBuilder_ == null) { - ensureEnumTypeIsMutable(); - enumType_.add(builderForValue.build()); - onChanged(); - } else { - enumTypeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public Builder addEnumType( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder builderForValue) { - if (enumTypeBuilder_ == null) { - ensureEnumTypeIsMutable(); - enumType_.add(index, builderForValue.build()); - onChanged(); - } else { - enumTypeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public Builder addAllEnumType( - java.lang.Iterable values) { - if (enumTypeBuilder_ == null) { - ensureEnumTypeIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, enumType_); - onChanged(); - } else { - enumTypeBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public Builder clearEnumType() { - if (enumTypeBuilder_ == null) { - enumType_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000010); - onChanged(); - } else { - enumTypeBuilder_.clear(); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public Builder removeEnumType(int index) { - if (enumTypeBuilder_ == null) { - ensureEnumTypeIsMutable(); - enumType_.remove(index); - onChanged(); - } else { - enumTypeBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder getEnumTypeBuilder( - int index) { - return getEnumTypeFieldBuilder().getBuilder(index); - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder getEnumTypeOrBuilder( - int index) { - if (enumTypeBuilder_ == null) { - return enumType_.get(index); } else { - return enumTypeBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public java.util.List - getEnumTypeOrBuilderList() { - if (enumTypeBuilder_ != null) { - return enumTypeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(enumType_); - } - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder addEnumTypeBuilder() { - return getEnumTypeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.getDefaultInstance()); - } - /** - 
* repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder addEnumTypeBuilder( - int index) { - return getEnumTypeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.EnumDescriptorProto enum_type = 4; - */ - public java.util.List - getEnumTypeBuilderList() { - return getEnumTypeFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder> - getEnumTypeFieldBuilder() { - if (enumTypeBuilder_ == null) { - enumTypeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProtoOrBuilder>( - enumType_, - ((bitField0_ & 0x00000010) == 0x00000010), - getParentForChildren(), - isClean()); - enumType_ = null; - } - return enumTypeBuilder_; - } - - private java.util.List extensionRange_ = - java.util.Collections.emptyList(); - private void ensureExtensionRangeIsMutable() { - if (!((bitField0_ & 0x00000020) == 0x00000020)) { - extensionRange_ = new java.util.ArrayList(extensionRange_); - bitField0_ |= 0x00000020; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder> extensionRangeBuilder_; - - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public java.util.List getExtensionRangeList() { - if (extensionRangeBuilder_ == null) { - return java.util.Collections.unmodifiableList(extensionRange_); - } else { - return extensionRangeBuilder_.getMessageList(); - } - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public int getExtensionRangeCount() { - if (extensionRangeBuilder_ == null) { - return extensionRange_.size(); - } else { - return extensionRangeBuilder_.getCount(); - } - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange getExtensionRange(int index) { - if (extensionRangeBuilder_ == null) { - return extensionRange_.get(index); - } else { - return extensionRangeBuilder_.getMessage(index); - } - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public Builder setExtensionRange( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange value) { - if (extensionRangeBuilder_ == null) { - if (value == null) { - throw new 
NullPointerException(); - } - ensureExtensionRangeIsMutable(); - extensionRange_.set(index, value); - onChanged(); - } else { - extensionRangeBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public Builder setExtensionRange( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder builderForValue) { - if (extensionRangeBuilder_ == null) { - ensureExtensionRangeIsMutable(); - extensionRange_.set(index, builderForValue.build()); - onChanged(); - } else { - extensionRangeBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public Builder addExtensionRange(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange value) { - if (extensionRangeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureExtensionRangeIsMutable(); - extensionRange_.add(value); - onChanged(); - } else { - extensionRangeBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public Builder addExtensionRange( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange value) { - if (extensionRangeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureExtensionRangeIsMutable(); - extensionRange_.add(index, value); - onChanged(); - } else { - extensionRangeBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public Builder addExtensionRange( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder builderForValue) { - if (extensionRangeBuilder_ == null) { - ensureExtensionRangeIsMutable(); - extensionRange_.add(builderForValue.build()); - onChanged(); - } else { - extensionRangeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public Builder addExtensionRange( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder builderForValue) { - if (extensionRangeBuilder_ == null) { - ensureExtensionRangeIsMutable(); - extensionRange_.add(index, builderForValue.build()); - onChanged(); - } else { - extensionRangeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public Builder addAllExtensionRange( - java.lang.Iterable values) { - if (extensionRangeBuilder_ == null) { - ensureExtensionRangeIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, extensionRange_); - onChanged(); - } else { - extensionRangeBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public Builder clearExtensionRange() { - if (extensionRangeBuilder_ == null) { - extensionRange_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000020); - onChanged(); - } else { - extensionRangeBuilder_.clear(); - } - return this; - } - 
/** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public Builder removeExtensionRange(int index) { - if (extensionRangeBuilder_ == null) { - ensureExtensionRangeIsMutable(); - extensionRange_.remove(index); - onChanged(); - } else { - extensionRangeBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder getExtensionRangeBuilder( - int index) { - return getExtensionRangeFieldBuilder().getBuilder(index); - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder getExtensionRangeOrBuilder( - int index) { - if (extensionRangeBuilder_ == null) { - return extensionRange_.get(index); } else { - return extensionRangeBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public java.util.List - getExtensionRangeOrBuilderList() { - if (extensionRangeBuilder_ != null) { - return extensionRangeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(extensionRange_); - } - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder addExtensionRangeBuilder() { - return getExtensionRangeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.getDefaultInstance()); - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder addExtensionRangeBuilder( - int index) { - return getExtensionRangeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.getDefaultInstance()); - } - /** - * repeated .google.protobuf.DescriptorProto.ExtensionRange extension_range = 5; - */ - public java.util.List - getExtensionRangeBuilderList() { - return getExtensionRangeFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder> - getExtensionRangeFieldBuilder() { - if (extensionRangeBuilder_ == null) { - extensionRangeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRangeOrBuilder>( - extensionRange_, - ((bitField0_ & 0x00000020) == 0x00000020), - getParentForChildren(), - isClean()); - extensionRange_ = null; - } - return extensionRangeBuilder_; - } - - 
private java.util.List oneofDecl_ = - java.util.Collections.emptyList(); - private void ensureOneofDeclIsMutable() { - if (!((bitField0_ & 0x00000040) == 0x00000040)) { - oneofDecl_ = new java.util.ArrayList(oneofDecl_); - bitField0_ |= 0x00000040; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder> oneofDeclBuilder_; - - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public java.util.List getOneofDeclList() { - if (oneofDeclBuilder_ == null) { - return java.util.Collections.unmodifiableList(oneofDecl_); - } else { - return oneofDeclBuilder_.getMessageList(); - } - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public int getOneofDeclCount() { - if (oneofDeclBuilder_ == null) { - return oneofDecl_.size(); - } else { - return oneofDeclBuilder_.getCount(); - } - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto getOneofDecl(int index) { - if (oneofDeclBuilder_ == null) { - return oneofDecl_.get(index); - } else { - return oneofDeclBuilder_.getMessage(index); - } - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public Builder setOneofDecl( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto value) { - if (oneofDeclBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureOneofDeclIsMutable(); - oneofDecl_.set(index, value); - onChanged(); - } else { - oneofDeclBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public Builder setOneofDecl( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder builderForValue) { - if (oneofDeclBuilder_ == null) { - ensureOneofDeclIsMutable(); - oneofDecl_.set(index, builderForValue.build()); - onChanged(); - } else { - oneofDeclBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public Builder addOneofDecl(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto value) { - if (oneofDeclBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureOneofDeclIsMutable(); - oneofDecl_.add(value); - onChanged(); - } else { - oneofDeclBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public Builder addOneofDecl( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto value) { - if (oneofDeclBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureOneofDeclIsMutable(); - oneofDecl_.add(index, value); - onChanged(); - } else { - oneofDeclBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public Builder addOneofDecl( - 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder builderForValue) { - if (oneofDeclBuilder_ == null) { - ensureOneofDeclIsMutable(); - oneofDecl_.add(builderForValue.build()); - onChanged(); - } else { - oneofDeclBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public Builder addOneofDecl( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder builderForValue) { - if (oneofDeclBuilder_ == null) { - ensureOneofDeclIsMutable(); - oneofDecl_.add(index, builderForValue.build()); - onChanged(); - } else { - oneofDeclBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public Builder addAllOneofDecl( - java.lang.Iterable values) { - if (oneofDeclBuilder_ == null) { - ensureOneofDeclIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, oneofDecl_); - onChanged(); - } else { - oneofDeclBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public Builder clearOneofDecl() { - if (oneofDeclBuilder_ == null) { - oneofDecl_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000040); - onChanged(); - } else { - oneofDeclBuilder_.clear(); - } - return this; - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public Builder removeOneofDecl(int index) { - if (oneofDeclBuilder_ == null) { - ensureOneofDeclIsMutable(); - oneofDecl_.remove(index); - onChanged(); - } else { - oneofDeclBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder getOneofDeclBuilder( - int index) { - return getOneofDeclFieldBuilder().getBuilder(index); - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder getOneofDeclOrBuilder( - int index) { - if (oneofDeclBuilder_ == null) { - return oneofDecl_.get(index); } else { - return oneofDeclBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public java.util.List - getOneofDeclOrBuilderList() { - if (oneofDeclBuilder_ != null) { - return oneofDeclBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(oneofDecl_); - } - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder addOneofDeclBuilder() { - return getOneofDeclFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.getDefaultInstance()); - } - /** - * repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder addOneofDeclBuilder( - int index) { - return getOneofDeclFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.getDefaultInstance()); - } - /** - * 
repeated .google.protobuf.OneofDescriptorProto oneof_decl = 8; - */ - public java.util.List - getOneofDeclBuilderList() { - return getOneofDeclFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder> - getOneofDeclFieldBuilder() { - if (oneofDeclBuilder_ == null) { - oneofDeclBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProtoOrBuilder>( - oneofDecl_, - ((bitField0_ & 0x00000040) == 0x00000040), - getParentForChildren(), - isClean()); - oneofDecl_ = null; - } - return oneofDeclBuilder_; - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions options_ = null; - private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptionsOrBuilder> optionsBuilder_; - /** - * optional .google.protobuf.MessageOptions options = 7; - */ - public boolean hasOptions() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - /** - * optional .google.protobuf.MessageOptions options = 7; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions getOptions() { - if (optionsBuilder_ == null) { - return options_ == null ? 
org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.getDefaultInstance() : options_; - } else { - return optionsBuilder_.getMessage(); - } - } - /** - * optional .google.protobuf.MessageOptions options = 7; - */ - public Builder setOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions value) { - if (optionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - options_ = value; - onChanged(); - } else { - optionsBuilder_.setMessage(value); - } - bitField0_ |= 0x00000080; - return this; - } - /** - * optional .google.protobuf.MessageOptions options = 7; - */ - public Builder setOptions( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder builderForValue) { - if (optionsBuilder_ == null) { - options_ = builderForValue.build(); - onChanged(); - } else { - optionsBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000080; - return this; - } - /** - * optional .google.protobuf.MessageOptions options = 7; - */ - public Builder mergeOptions(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions value) { - if (optionsBuilder_ == null) { - if (((bitField0_ & 0x00000080) == 0x00000080) && - options_ != null && - options_ != org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.getDefaultInstance()) { - options_ = - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.newBuilder(options_).mergeFrom(value).buildPartial(); - } else { - options_ = value; - } - onChanged(); - } else { - optionsBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000080; - return this; - } - /** - * optional .google.protobuf.MessageOptions options = 7; - */ - public Builder clearOptions() { - if (optionsBuilder_ == null) { - options_ = null; - onChanged(); - } else { - optionsBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000080); - return this; - } - /** - * optional .google.protobuf.MessageOptions options = 7; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder getOptionsBuilder() { - bitField0_ |= 0x00000080; - onChanged(); - return getOptionsFieldBuilder().getBuilder(); - } - /** - * optional .google.protobuf.MessageOptions options = 7; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptionsOrBuilder getOptionsOrBuilder() { - if (optionsBuilder_ != null) { - return optionsBuilder_.getMessageOrBuilder(); - } else { - return options_ == null ? 
- org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.getDefaultInstance() : options_; - } - } - /** - * optional .google.protobuf.MessageOptions options = 7; - */ - private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptionsOrBuilder> - getOptionsFieldBuilder() { - if (optionsBuilder_ == null) { - optionsBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptionsOrBuilder>( - getOptions(), - getParentForChildren(), - isClean()); - options_ = null; - } - return optionsBuilder_; - } - - private java.util.List reservedRange_ = - java.util.Collections.emptyList(); - private void ensureReservedRangeIsMutable() { - if (!((bitField0_ & 0x00000100) == 0x00000100)) { - reservedRange_ = new java.util.ArrayList(reservedRange_); - bitField0_ |= 0x00000100; - } - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder> reservedRangeBuilder_; - - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public java.util.List getReservedRangeList() { - if (reservedRangeBuilder_ == null) { - return java.util.Collections.unmodifiableList(reservedRange_); - } else { - return reservedRangeBuilder_.getMessageList(); - } - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public int getReservedRangeCount() { - if (reservedRangeBuilder_ == null) { - return reservedRange_.size(); - } else { - return reservedRangeBuilder_.getCount(); - } - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange getReservedRange(int index) { - if (reservedRangeBuilder_ == null) { - return reservedRange_.get(index); - } else { - return reservedRangeBuilder_.getMessage(index); - } - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public Builder setReservedRange( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange value) { - if (reservedRangeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureReservedRangeIsMutable(); - reservedRange_.set(index, value); - onChanged(); - } else { - reservedRangeBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public Builder setReservedRange( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder builderForValue) { - if (reservedRangeBuilder_ == 
null) { - ensureReservedRangeIsMutable(); - reservedRange_.set(index, builderForValue.build()); - onChanged(); - } else { - reservedRangeBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public Builder addReservedRange(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange value) { - if (reservedRangeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureReservedRangeIsMutable(); - reservedRange_.add(value); - onChanged(); - } else { - reservedRangeBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public Builder addReservedRange( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange value) { - if (reservedRangeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureReservedRangeIsMutable(); - reservedRange_.add(index, value); - onChanged(); - } else { - reservedRangeBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public Builder addReservedRange( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder builderForValue) { - if (reservedRangeBuilder_ == null) { - ensureReservedRangeIsMutable(); - reservedRange_.add(builderForValue.build()); - onChanged(); - } else { - reservedRangeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public Builder addReservedRange( - int index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder builderForValue) { - if (reservedRangeBuilder_ == null) { - ensureReservedRangeIsMutable(); - reservedRange_.add(index, builderForValue.build()); - onChanged(); - } else { - reservedRangeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public Builder addAllReservedRange( - java.lang.Iterable values) { - if (reservedRangeBuilder_ == null) { - ensureReservedRangeIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, reservedRange_); - onChanged(); - } else { - reservedRangeBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public Builder clearReservedRange() { - if (reservedRangeBuilder_ == null) { - reservedRange_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000100); - onChanged(); - } else { - reservedRangeBuilder_.clear(); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public Builder removeReservedRange(int index) { - if (reservedRangeBuilder_ == null) { - ensureReservedRangeIsMutable(); - reservedRange_.remove(index); - onChanged(); - } else { - reservedRangeBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder 
getReservedRangeBuilder( - int index) { - return getReservedRangeFieldBuilder().getBuilder(index); - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder getReservedRangeOrBuilder( - int index) { - if (reservedRangeBuilder_ == null) { - return reservedRange_.get(index); } else { - return reservedRangeBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public java.util.List - getReservedRangeOrBuilderList() { - if (reservedRangeBuilder_ != null) { - return reservedRangeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(reservedRange_); - } - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder addReservedRangeBuilder() { - return getReservedRangeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.getDefaultInstance()); - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder addReservedRangeBuilder( - int index) { - return getReservedRangeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.getDefaultInstance()); - } - /** - * repeated .google.protobuf.DescriptorProto.ReservedRange reserved_range = 9; - */ - public java.util.List - getReservedRangeBuilderList() { - return getReservedRangeFieldBuilder().getBuilderList(); - } - private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder> - getReservedRangeFieldBuilder() { - if (reservedRangeBuilder_ == null) { - reservedRangeBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRangeOrBuilder>( - reservedRange_, - ((bitField0_ & 0x00000100) == 0x00000100), - getParentForChildren(), - isClean()); - reservedRange_ = null; - } - return reservedRangeBuilder_; - } - - private org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringList reservedName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureReservedNameIsMutable() { - if (!((bitField0_ & 0x00000200) == 0x00000200)) { - reservedName_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList(reservedName_); - bitField0_ |= 0x00000200; - } - } - /** - *
-       * Reserved field names, which may not be used by fields in the same message.
-       * A given name may only be reserved once.
-       * 
- * - * repeated string reserved_name = 10; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolStringList - getReservedNameList() { - return reservedName_.getUnmodifiableView(); - } - /** - *
-       * Reserved field names, which may not be used by fields in the same message.
-       * A given name may only be reserved once.
-       * 
- * - * repeated string reserved_name = 10; - */ - public int getReservedNameCount() { - return reservedName_.size(); - } - /** - *
-       * Reserved field names, which may not be used by fields in the same message.
-       * A given name may only be reserved once.
-       * 
- * - * repeated string reserved_name = 10; - */ - public java.lang.String getReservedName(int index) { - return reservedName_.get(index); - } - /** - *
-       * Reserved field names, which may not be used by fields in the same message.
-       * A given name may only be reserved once.
-       * 
- * - * repeated string reserved_name = 10; - */ - public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getReservedNameBytes(int index) { - return reservedName_.getByteString(index); - } - /** - *
-       * Reserved field names, which may not be used by fields in the same message.
-       * A given name may only be reserved once.
-       * 
- * - * repeated string reserved_name = 10; - */ - public Builder setReservedName( - int index, java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureReservedNameIsMutable(); - reservedName_.set(index, value); - onChanged(); - return this; - } - /** - *
-       * Reserved field names, which may not be used by fields in the same message.
-       * A given name may only be reserved once.
-       * 
- * - * repeated string reserved_name = 10; - */ - public Builder addReservedName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureReservedNameIsMutable(); - reservedName_.add(value); - onChanged(); - return this; - } - /** - *
-       * Reserved field names, which may not be used by fields in the same message.
-       * A given name may only be reserved once.
-       * 
- * - * repeated string reserved_name = 10; - */ - public Builder addAllReservedName( - java.lang.Iterable values) { - ensureReservedNameIsMutable(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, reservedName_); - onChanged(); - return this; - } - /** - *
-       * Reserved field names, which may not be used by fields in the same message.
-       * A given name may only be reserved once.
-       * 
- * - * repeated string reserved_name = 10; - */ - public Builder clearReservedName() { - reservedName_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000200); - onChanged(); - return this; - } - /** - *
-       * Reserved field names, which may not be used by fields in the same message.
-       * A given name may only be reserved once.
-       * 
- * - * repeated string reserved_name = 10; - */ - public Builder addReservedNameBytes( - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureReservedNameIsMutable(); - reservedName_.add(value); - onChanged(); - return this; - } - public final Builder setUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - public final Builder mergeUnknownFields( - final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:google.protobuf.DescriptorProto) - } - - // @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto) - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto(); - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser - PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser() { - public DescriptorProto parsePartialFrom( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - return new DescriptorProto(input, extensionRegistry); - } - }; - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface FieldDescriptorProtoOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.protobuf.FieldDescriptorProto) - org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { - - /** - * optional string name = 1; - */ - boolean hasName(); - /** - * optional string name = 1; - */ - java.lang.String getName(); - /** - * optional string name = 1; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getNameBytes(); - - /** - * optional int32 number = 3; - */ - boolean hasNumber(); - /** - * optional int32 number = 3; - */ - int getNumber(); - - /** - * optional .google.protobuf.FieldDescriptorProto.Label label = 4; - */ - boolean hasLabel(); - /** - * optional .google.protobuf.FieldDescriptorProto.Label label = 4; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label getLabel(); - - /** - *
-     * If type_name is set, this need not be set.  If both this and type_name
-     * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
-     * 
- * - * optional .google.protobuf.FieldDescriptorProto.Type type = 5; - */ - boolean hasType(); - /** - *
-     * If type_name is set, this need not be set.  If both this and type_name
-     * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
-     * 
- * - * optional .google.protobuf.FieldDescriptorProto.Type type = 5; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type getType(); - - /** - *
-     * For message and enum types, this is the name of the type.  If the name
-     * starts with a '.', it is fully-qualified.  Otherwise, C++-like scoping
-     * rules are used to find the type (i.e. first the nested types within this
-     * message are searched, then within the parent, on up to the root
-     * namespace).
-     * 
- * - * optional string type_name = 6; - */ - boolean hasTypeName(); - /** - *
-     * For message and enum types, this is the name of the type.  If the name
-     * starts with a '.', it is fully-qualified.  Otherwise, C++-like scoping
-     * rules are used to find the type (i.e. first the nested types within this
-     * message are searched, then within the parent, on up to the root
-     * namespace).
-     * 
- * - * optional string type_name = 6; - */ - java.lang.String getTypeName(); - /** - *
-     * For message and enum types, this is the name of the type.  If the name
-     * starts with a '.', it is fully-qualified.  Otherwise, C++-like scoping
-     * rules are used to find the type (i.e. first the nested types within this
-     * message are searched, then within the parent, on up to the root
-     * namespace).
-     * 
- * - * optional string type_name = 6; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getTypeNameBytes(); - - /** - *
-     * For extensions, this is the name of the type being extended.  It is
-     * resolved in the same manner as type_name.
-     * 
- * - * optional string extendee = 2; - */ - boolean hasExtendee(); - /** - *
-     * For extensions, this is the name of the type being extended.  It is
-     * resolved in the same manner as type_name.
-     * 
- * - * optional string extendee = 2; - */ - java.lang.String getExtendee(); - /** - *
-     * For extensions, this is the name of the type being extended.  It is
-     * resolved in the same manner as type_name.
-     * 
- * - * optional string extendee = 2; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getExtendeeBytes(); - - /** - *
-     * For numeric types, contains the original text representation of the value.
-     * For booleans, "true" or "false".
-     * For strings, contains the default text contents (not escaped in any way).
-     * For bytes, contains the C escaped value.  All bytes >= 128 are escaped.
-     * TODO(kenton):  Base-64 encode?
-     * 
- * - * optional string default_value = 7; - */ - boolean hasDefaultValue(); - /** - *
-     * For numeric types, contains the original text representation of the value.
-     * For booleans, "true" or "false".
-     * For strings, contains the default text contents (not escaped in any way).
-     * For bytes, contains the C escaped value.  All bytes >= 128 are escaped.
-     * TODO(kenton):  Base-64 encode?
-     * 
- * - * optional string default_value = 7; - */ - java.lang.String getDefaultValue(); - /** - *
-     * For numeric types, contains the original text representation of the value.
-     * For booleans, "true" or "false".
-     * For strings, contains the default text contents (not escaped in any way).
-     * For bytes, contains the C escaped value.  All bytes >= 128 are escaped.
-     * TODO(kenton):  Base-64 encode?
-     * 
- * - * optional string default_value = 7; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getDefaultValueBytes(); - - /** - *
-     * If set, gives the index of a oneof in the containing type's oneof_decl
-     * list.  This field is a member of that oneof.
-     * 
- * - * optional int32 oneof_index = 9; - */ - boolean hasOneofIndex(); - /** - *
-     * If set, gives the index of a oneof in the containing type's oneof_decl
-     * list.  This field is a member of that oneof.
-     * 
- * - * optional int32 oneof_index = 9; - */ - int getOneofIndex(); - - /** - *
-     * JSON name of this field. The value is set by protocol compiler. If the
-     * user has set a "json_name" option on this field, that option's value
-     * will be used. Otherwise, it's deduced from the field's name by converting
-     * it to camelCase.
-     * 
- * - * optional string json_name = 10; - */ - boolean hasJsonName(); - /** - *
-     * JSON name of this field. The value is set by protocol compiler. If the
-     * user has set a "json_name" option on this field, that option's value
-     * will be used. Otherwise, it's deduced from the field's name by converting
-     * it to camelCase.
-     * 
- * - * optional string json_name = 10; - */ - java.lang.String getJsonName(); - /** - *
-     * JSON name of this field. The value is set by protocol compiler. If the
-     * user has set a "json_name" option on this field, that option's value
-     * will be used. Otherwise, it's deduced from the field's name by converting
-     * it to camelCase.
-     * 
- * - * optional string json_name = 10; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString - getJsonNameBytes(); - - /** - * optional .google.protobuf.FieldOptions options = 8; - */ - boolean hasOptions(); - /** - * optional .google.protobuf.FieldOptions options = 8; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions getOptions(); - /** - * optional .google.protobuf.FieldOptions options = 8; - */ - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptionsOrBuilder getOptionsOrBuilder(); - } - /** - *
-   * Describes a field within a message.
-   * 
- * - * Protobuf type {@code google.protobuf.FieldDescriptorProto} - */ - public static final class FieldDescriptorProto extends - org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:google.protobuf.FieldDescriptorProto) - FieldDescriptorProtoOrBuilder { - // Use FieldDescriptorProto.newBuilder() to construct. - private FieldDescriptorProto(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private FieldDescriptorProto() { - name_ = ""; - number_ = 0; - label_ = 1; - type_ = 1; - typeName_ = ""; - extendee_ = ""; - defaultValue_ = ""; - oneofIndex_ = 0; - jsonName_ = ""; - } - - @java.lang.Override - public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private FieldDescriptorProto( - org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, - org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { - this(); - int mutable_bitField0_ = 0; - org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = - org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); - bitField0_ |= 0x00000001; - name_ = bs; - break; - } - case 18: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); - bitField0_ |= 0x00000020; - extendee_ = bs; - break; - } - case 24: { - bitField0_ |= 0x00000002; - number_ = input.readInt32(); - break; - } - case 32: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label value = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Label.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(4, rawValue); - } else { - bitField0_ |= 0x00000004; - label_ = rawValue; - } - break; - } - case 40: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type value = org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Type.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(5, rawValue); - } else { - bitField0_ |= 0x00000008; - type_ = rawValue; - } - break; - } - case 50: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); - bitField0_ |= 0x00000010; - typeName_ = bs; - break; - } - case 58: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); - bitField0_ |= 0x00000040; - defaultValue_ = bs; - break; - } - case 66: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.Builder subBuilder = null; - if (((bitField0_ & 0x00000200) == 0x00000200)) { - subBuilder = options_.toBuilder(); - } - options_ = input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions.PARSER, extensionRegistry); - 
if (subBuilder != null) { - subBuilder.mergeFrom(options_); - options_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000200; - break; - } - case 72: { - bitField0_ |= 0x00000080; - oneofIndex_ = input.readInt32(); - break; - } - case 82: { - org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); - bitField0_ |= 0x00000100; - jsonName_ = bs; - break; - } - } - } - } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FieldDescriptorProto_descriptor; - } - - protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.internal_static_google_protobuf_FieldDescriptorProto_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.Builder.class); - } - - /** - * Protobuf enum {@code google.protobuf.FieldDescriptorProto.Type} - */ - public enum Type - implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { - /** - *
-       * 0 is reserved for errors.
-       * Order is weird for historical reasons.
-       * 
- * - * TYPE_DOUBLE = 1; - */ - TYPE_DOUBLE(1), - /** - * TYPE_FLOAT = 2; - */ - TYPE_FLOAT(2), - /** - *
-       * Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
-       * negative values are likely.
-       * 
- * - * TYPE_INT64 = 3; - */ - TYPE_INT64(3), - /** - * TYPE_UINT64 = 4; - */ - TYPE_UINT64(4), - /** - *
-       * Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
-       * negative values are likely.
-       * 
- * - * TYPE_INT32 = 5; - */ - TYPE_INT32(5), - /** - * TYPE_FIXED64 = 6; - */ - TYPE_FIXED64(6), - /** - * TYPE_FIXED32 = 7; - */ - TYPE_FIXED32(7), - /** - * TYPE_BOOL = 8; - */ - TYPE_BOOL(8), - /** - * TYPE_STRING = 9; - */ - TYPE_STRING(9), - /** - *
-       * Tag-delimited aggregate.
-       * Group type is deprecated and not supported in proto3. However, Proto3
-       * implementations should still be able to parse the group wire format and
-       * treat group fields as unknown fields.
-       * 
- * - * TYPE_GROUP = 10; - */ - TYPE_GROUP(10), - /** - *
-       * Length-delimited aggregate.
-       * 
- * - * TYPE_MESSAGE = 11; - */ - TYPE_MESSAGE(11), - /** - *
-       * New in version 2.
-       * 
- * - * TYPE_BYTES = 12; - */ - TYPE_BYTES(12), - /** - * TYPE_UINT32 = 13; - */ - TYPE_UINT32(13), - /** - * TYPE_ENUM = 14; - */ - TYPE_ENUM(14), - /** - * TYPE_SFIXED32 = 15; - */ - TYPE_SFIXED32(15), - /** - * TYPE_SFIXED64 = 16; - */ - TYPE_SFIXED64(16), - /** - *
-       * Uses ZigZag encoding.
-       * 
- * - * TYPE_SINT32 = 17; - */ - TYPE_SINT32(17), - /** - *
-       * Uses ZigZag encoding.
-       * 
- * - * TYPE_SINT64 = 18; - */ - TYPE_SINT64(18), - ; - - /** - *
-       * 0 is reserved for errors.
-       * Order is weird for historical reasons.
-       * 
- * - * TYPE_DOUBLE = 1; - */ - public static final int TYPE_DOUBLE_VALUE = 1; - /** - * TYPE_FLOAT = 2; - */ - public static final int TYPE_FLOAT_VALUE = 2; - /** - *
-       * Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
-       * negative values are likely.
-       * 
- * - * TYPE_INT64 = 3; - */ - public static final int TYPE_INT64_VALUE = 3; - /** - * TYPE_UINT64 = 4; - */ - public static final int TYPE_UINT64_VALUE = 4; - /** - *
-       * Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
-       * negative values are likely.
-       * 
- * - * TYPE_INT32 = 5; - */ - public static final int TYPE_INT32_VALUE = 5; - /** - * TYPE_FIXED64 = 6; - */ - public static final int TYPE_FIXED64_VALUE = 6; - /** - * TYPE_FIXED32 = 7; - */ - public static final int TYPE_FIXED32_VALUE = 7; - /** - * TYPE_BOOL = 8; - */ - public static final int TYPE_BOOL_VALUE = 8; - /** - * TYPE_STRING = 9; - */ - public static final int TYPE_STRING_VALUE = 9; - /** - *
-       * Tag-delimited aggregate.
-       * Group type is deprecated and not supported in proto3. However, Proto3
-       * implementations should still be able to parse the group wire format and
-       * treat group fields as unknown fields.
-       * 
- * - * TYPE_GROUP = 10; - */ - public static final int TYPE_GROUP_VALUE = 10; - /** - *
-       * Length-delimited aggregate.
-       * 
- * - * TYPE_MESSAGE = 11; - */ - public static final int TYPE_MESSAGE_VALUE = 11; - /** - *
-       * New in version 2.
-       * 
- * - * TYPE_BYTES = 12; - */ - public static final int TYPE_BYTES_VALUE = 12; - /** - * TYPE_UINT32 = 13; - */ - public static final int TYPE_UINT32_VALUE = 13; - /** - * TYPE_ENUM = 14; - */ - public static final int TYPE_ENUM_VALUE = 14; - /** - * TYPE_SFIXED32 = 15; - */ - public static final int TYPE_SFIXED32_VALUE = 15; - /** - * TYPE_SFIXED64 = 16; - */ - public static final int TYPE_SFIXED64_VALUE = 16; - /** - *
-       * Uses ZigZag encoding.
-       * 
- * - * TYPE_SINT32 = 17; - */ - public static final int TYPE_SINT32_VALUE = 17; - /** - *
-       * Uses ZigZag encoding.
-       * 
- * - * TYPE_SINT64 = 18; - */ - public static final int TYPE_SINT64_VALUE = 18; - - - public final int getNumber() { - return value; - } - - /** - * @deprecated Use {@link #forNumber(int)} instead. - */ - @java.lang.Deprecated - public static Type valueOf(int value) { - return forNumber(value); - } - - public static Type forNumber(int value) { - switch (value) { - case 1: return TYPE_DOUBLE; - case 2: return TYPE_FLOAT; - case 3: return TYPE_INT64; - case 4: return TYPE_UINT64; - case 5: return TYPE_INT32; - case 6: return TYPE_FIXED64; - case 7: return TYPE_FIXED32; - case 8: return TYPE_BOOL; - case 9: return TYPE_STRING; - case 10: return TYPE_GROUP; - case 11: return TYPE_MESSAGE; - case 12: return TYPE_BYTES; - case 13: return TYPE_UINT32; - case 14: return TYPE_ENUM; - case 15: return TYPE_SFIXED32; - case 16: return TYPE_SFIXED64; - case 17: return TYPE_SINT32; - case 18: return TYPE_SINT64; - default: return null; - } - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< - Type> internalValueMap = - new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap() { - public Type findValueByNumber(int number) { - return Type.forNumber(number); - } - }; - - public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(ordinal()); - } - public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto.getDescriptor().getEnumTypes().get(0); - } - - private static final Type[] VALUES = values(); - - public static Type valueOf( - org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int value; - - private Type(int value) { - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:google.protobuf.FieldDescriptorProto.Type) - } - - /** - * Protobuf enum {@code google.protobuf.FieldDescriptorProto.Label} - */ - public enum Label - implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { - /** - *
-       * 0 is reserved for errors
-       * 
- * - * LABEL_OPTIONAL = 1; - */ - LABEL_OPTIONAL(1), - /** - * LABEL_REQUIRED = 2; - */ - LABEL_REQUIRED(2), - /** - * LABEL_REPEATED = 3; - */ - LABEL_REPEATED(3), - ; - - /** - *
-       * 0 is reserved for errors
-       * 
- * - * LABEL_OPTIONAL = 1; - */ - public static final int LABEL_OPTIONAL_VALUE = 1; - /** - * LABEL_REQUIRED = 2; - */ - public static final int LABEL_REQUIRED_VALUE = 2; - /** - * LABEL_REPEATED = 3; - */ - public static final int LABEL_REPEATED_VALUE = 3; - - - public final int getNumber() { - return value; - } - - /** - * @deprecated Use {@link #forNumber(int)} instead. - */ - @java.lang.Deprecated - public static Label valueOf(int value) { - return forNumber(value); - } - - public static Label forNumber(int value) { - switch (value) { - case 1: return LABEL_OPTIONAL; - case 2: return LABEL_REQUIRED; - case 3: return LABEL_REPEATED; - default: return null; - } - } - - public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap