diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java new file mode 100644 index 00000000000..2766a7b1c04 --- /dev/null +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/SecureBulkLoadProtos.java @@ -0,0 +1,3977 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: SecureBulkLoad.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class SecureBulkLoadProtos { + private SecureBulkLoadProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface SecureBulkLoadHFilesRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1; + java.util.List + getFamilyPathList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index); + int getFamilyPathCount(); + java.util.List + getFamilyPathOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + int index); + + // optional bool assignSeqNum = 2; + boolean hasAssignSeqNum(); + boolean getAssignSeqNum(); + + // required .DelegationTokenProto fsToken = 3; + boolean hasFsToken(); + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto getFsToken(); + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder getFsTokenOrBuilder(); + + // required string bulkToken = 4; + boolean hasBulkToken(); + String getBulkToken(); + } + public static final class SecureBulkLoadHFilesRequest extends + com.google.protobuf.GeneratedMessage + implements SecureBulkLoadHFilesRequestOrBuilder { + // Use SecureBulkLoadHFilesRequest.newBuilder() to construct. 
+ private SecureBulkLoadHFilesRequest(Builder builder) { + super(builder); + } + private SecureBulkLoadHFilesRequest(boolean noInit) {} + + private static final SecureBulkLoadHFilesRequest defaultInstance; + public static SecureBulkLoadHFilesRequest getDefaultInstance() { + return defaultInstance; + } + + public SecureBulkLoadHFilesRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesRequest_fieldAccessorTable; + } + + private int bitField0_; + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1; + public static final int FAMILYPATH_FIELD_NUMBER = 1; + private java.util.List familyPath_; + public java.util.List getFamilyPathList() { + return familyPath_; + } + public java.util.List + getFamilyPathOrBuilderList() { + return familyPath_; + } + public int getFamilyPathCount() { + return familyPath_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { + return familyPath_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + int index) { + return familyPath_.get(index); + } + + // optional bool assignSeqNum = 2; + public static final int ASSIGNSEQNUM_FIELD_NUMBER = 2; + private boolean assignSeqNum_; + public boolean hasAssignSeqNum() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getAssignSeqNum() { + return assignSeqNum_; + } + + // required .DelegationTokenProto fsToken = 3; + public static final int FSTOKEN_FIELD_NUMBER = 3; + private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto fsToken_; + public boolean hasFsToken() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto getFsToken() { + return fsToken_; + } + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder getFsTokenOrBuilder() { + return fsToken_; + } + + // required string bulkToken = 4; + public static final int BULKTOKEN_FIELD_NUMBER = 4; + private java.lang.Object bulkToken_; + public boolean hasBulkToken() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + bulkToken_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getBulkTokenBytes() { + java.lang.Object ref = bulkToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + bulkToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + familyPath_ = java.util.Collections.emptyList(); + assignSeqNum_ = false; + fsToken_ = 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance(); + bulkToken_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFsToken()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasBulkToken()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getFamilyPathCount(); i++) { + if (!getFamilyPath(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < familyPath_.size(); i++) { + output.writeMessage(1, familyPath_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(2, assignSeqNum_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(3, fsToken_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(4, getBulkTokenBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < familyPath_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, familyPath_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, assignSeqNum_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, fsToken_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, getBulkTokenBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest) obj; + + boolean result = true; + result = result && getFamilyPathList() + .equals(other.getFamilyPathList()); + result = result && (hasAssignSeqNum() == other.hasAssignSeqNum()); + if (hasAssignSeqNum()) { + result = result && (getAssignSeqNum() + == other.getAssignSeqNum()); + } + result = result && (hasFsToken() == other.hasFsToken()); + if (hasFsToken()) { + result = result && getFsToken() + .equals(other.getFsToken()); + } + result = result && (hasBulkToken() == other.hasBulkToken()); + if (hasBulkToken()) { + result = result && getBulkToken() + .equals(other.getBulkToken()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getFamilyPathCount() > 0) { + 
hash = (37 * hash) + FAMILYPATH_FIELD_NUMBER; + hash = (53 * hash) + getFamilyPathList().hashCode(); + } + if (hasAssignSeqNum()) { + hash = (37 * hash) + ASSIGNSEQNUM_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getAssignSeqNum()); + } + if (hasFsToken()) { + hash = (37 * hash) + FSTOKEN_FIELD_NUMBER; + hash = (53 * hash) + getFsToken().hashCode(); + } + if (hasBulkToken()) { + hash = (37 * hash) + BULKTOKEN_FIELD_NUMBER; + hash = (53 * hash) + getBulkToken().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getFamilyPathFieldBuilder(); + getFsTokenFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (familyPathBuilder_ == null) { + familyPath_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + familyPathBuilder_.clear(); + } + assignSeqNum_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + if (fsTokenBuilder_ == null) { + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance(); + } else { + fsTokenBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + bulkToken_ = ""; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest build() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest buildParsed() + throws 
com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (familyPathBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + familyPath_ = java.util.Collections.unmodifiableList(familyPath_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.familyPath_ = familyPath_; + } else { + result.familyPath_ = familyPathBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000001; + } + result.assignSeqNum_ = assignSeqNum_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000002; + } + if (fsTokenBuilder_ == null) { + result.fsToken_ = fsToken_; + } else { + result.fsToken_ = fsTokenBuilder_.build(); + } + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000004; + } + result.bulkToken_ = bulkToken_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance()) return this; + if (familyPathBuilder_ == null) { + if (!other.familyPath_.isEmpty()) { + if (familyPath_.isEmpty()) { + familyPath_ = other.familyPath_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureFamilyPathIsMutable(); + familyPath_.addAll(other.familyPath_); + } + onChanged(); + } + } else { + if (!other.familyPath_.isEmpty()) { + if (familyPathBuilder_.isEmpty()) { + familyPathBuilder_.dispose(); + familyPathBuilder_ = null; + familyPath_ = other.familyPath_; + bitField0_ = (bitField0_ & ~0x00000001); + familyPathBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getFamilyPathFieldBuilder() : null; + } else { + familyPathBuilder_.addAllMessages(other.familyPath_); + } + } + } + if (other.hasAssignSeqNum()) { + setAssignSeqNum(other.getAssignSeqNum()); + } + if (other.hasFsToken()) { + mergeFsToken(other.getFsToken()); + } + if (other.hasBulkToken()) { + setBulkToken(other.getBulkToken()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFsToken()) { + + return false; + } + if (!hasBulkToken()) { + + return false; + } + for (int i = 0; i < getFamilyPathCount(); i++) { + if (!getFamilyPath(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addFamilyPath(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + assignSeqNum_ = input.readBool(); + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.newBuilder(); + if (hasFsToken()) { + subBuilder.mergeFrom(getFsToken()); + } + input.readMessage(subBuilder, extensionRegistry); + setFsToken(subBuilder.buildPartial()); + break; + } + case 34: { + bitField0_ |= 0x00000008; + bulkToken_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 1; + private java.util.List familyPath_ = + java.util.Collections.emptyList(); + private void ensureFamilyPathIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + familyPath_ = new java.util.ArrayList(familyPath_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_; + + public java.util.List getFamilyPathList() { + if (familyPathBuilder_ == null) { + return java.util.Collections.unmodifiableList(familyPath_); + } else { + return familyPathBuilder_.getMessageList(); + } + } + public int getFamilyPathCount() { + if (familyPathBuilder_ == null) { + return familyPath_.size(); + } else { + return familyPathBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { + if (familyPathBuilder_ == null) { + return familyPath_.get(index); + } else { + return 
familyPathBuilder_.getMessage(index); + } + } + public Builder setFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { + if (familyPathBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyPathIsMutable(); + familyPath_.set(index, value); + onChanged(); + } else { + familyPathBuilder_.setMessage(index, value); + } + return this; + } + public Builder setFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.set(index, builderForValue.build()); + onChanged(); + } else { + familyPathBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { + if (familyPathBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyPathIsMutable(); + familyPath_.add(value); + onChanged(); + } else { + familyPathBuilder_.addMessage(value); + } + return this; + } + public Builder addFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) { + if (familyPathBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyPathIsMutable(); + familyPath_.add(index, value); + onChanged(); + } else { + familyPathBuilder_.addMessage(index, value); + } + return this; + } + public Builder addFamilyPath( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.add(builderForValue.build()); + onChanged(); + } else { + familyPathBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.add(index, builderForValue.build()); + onChanged(); + } else { + familyPathBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllFamilyPath( + java.lang.Iterable values) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + super.addAll(values, familyPath_); + onChanged(); + } else { + familyPathBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearFamilyPath() { + if (familyPathBuilder_ == null) { + familyPath_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + familyPathBuilder_.clear(); + } + return this; + } + public Builder removeFamilyPath(int index) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.remove(index); + onChanged(); + } else { + familyPathBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder( + int index) { + return getFamilyPathFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + int index) { + if (familyPathBuilder_ == null) { + return familyPath_.get(index); } else { + return 
familyPathBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getFamilyPathOrBuilderList() { + if (familyPathBuilder_ != null) { + return familyPathBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(familyPath_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() { + return getFamilyPathFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder( + int index) { + return getFamilyPathFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); + } + public java.util.List + getFamilyPathBuilderList() { + return getFamilyPathFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> + getFamilyPathFieldBuilder() { + if (familyPathBuilder_ == null) { + familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>( + familyPath_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + familyPath_ = null; + } + return familyPathBuilder_; + } + + // optional bool assignSeqNum = 2; + private boolean assignSeqNum_ ; + public boolean hasAssignSeqNum() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getAssignSeqNum() { + return assignSeqNum_; + } + public Builder setAssignSeqNum(boolean value) { + bitField0_ |= 0x00000002; + assignSeqNum_ = value; + onChanged(); + return this; + } + public Builder clearAssignSeqNum() { + bitField0_ = (bitField0_ & ~0x00000002); + assignSeqNum_ = false; + onChanged(); + return this; + } + + // required .DelegationTokenProto fsToken = 3; + private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder> fsTokenBuilder_; + public boolean hasFsToken() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto getFsToken() { + if (fsTokenBuilder_ == null) { + return fsToken_; + } else { + return fsTokenBuilder_.getMessage(); + } + } + public Builder setFsToken(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto value) { + if (fsTokenBuilder_ == null) { + if (value == null) { + throw new 
NullPointerException(); + } + fsToken_ = value; + onChanged(); + } else { + fsTokenBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder setFsToken( + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder builderForValue) { + if (fsTokenBuilder_ == null) { + fsToken_ = builderForValue.build(); + onChanged(); + } else { + fsTokenBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder mergeFsToken(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto value) { + if (fsTokenBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004) && + fsToken_ != org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance()) { + fsToken_ = + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.newBuilder(fsToken_).mergeFrom(value).buildPartial(); + } else { + fsToken_ = value; + } + onChanged(); + } else { + fsTokenBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder clearFsToken() { + if (fsTokenBuilder_ == null) { + fsToken_ = org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance(); + onChanged(); + } else { + fsTokenBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder getFsTokenBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getFsTokenFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder getFsTokenOrBuilder() { + if (fsTokenBuilder_ != null) { + return fsTokenBuilder_.getMessageOrBuilder(); + } else { + return fsToken_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder> + getFsTokenFieldBuilder() { + if (fsTokenBuilder_ == null) { + fsTokenBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder>( + fsToken_, + getParentForChildren(), + isClean()); + fsToken_ = null; + } + return fsTokenBuilder_; + } + + // required string bulkToken = 4; + private java.lang.Object bulkToken_ = ""; + public boolean hasBulkToken() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + bulkToken_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setBulkToken(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + bulkToken_ = value; + onChanged(); + return this; + } + public Builder clearBulkToken() { + bitField0_ = (bitField0_ & ~0x00000008); + bulkToken_ = getDefaultInstance().getBulkToken(); + onChanged(); + return this; + } + void 
setBulkToken(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000008; + bulkToken_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:SecureBulkLoadHFilesRequest) + } + + static { + defaultInstance = new SecureBulkLoadHFilesRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SecureBulkLoadHFilesRequest) + } + + public interface SecureBulkLoadHFilesResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool loaded = 1; + boolean hasLoaded(); + boolean getLoaded(); + } + public static final class SecureBulkLoadHFilesResponse extends + com.google.protobuf.GeneratedMessage + implements SecureBulkLoadHFilesResponseOrBuilder { + // Use SecureBulkLoadHFilesResponse.newBuilder() to construct. + private SecureBulkLoadHFilesResponse(Builder builder) { + super(builder); + } + private SecureBulkLoadHFilesResponse(boolean noInit) {} + + private static final SecureBulkLoadHFilesResponse defaultInstance; + public static SecureBulkLoadHFilesResponse getDefaultInstance() { + return defaultInstance; + } + + public SecureBulkLoadHFilesResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesResponse_fieldAccessorTable; + } + + private int bitField0_; + // required bool loaded = 1; + public static final int LOADED_FIELD_NUMBER = 1; + private boolean loaded_; + public boolean hasLoaded() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getLoaded() { + return loaded_; + } + + private void initFields() { + loaded_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLoaded()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, loaded_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, loaded_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse other = 
(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) obj; + + boolean result = true; + result = result && (hasLoaded() == other.hasLoaded()); + if (hasLoaded()) { + result = result && (getLoaded() + == other.getLoaded()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLoaded()) { + hash = (37 * hash) + LOADED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getLoaded()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_SecureBulkLoadHFilesResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + loaded_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse build() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.loaded_ = loaded_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance()) return this; + if (other.hasLoaded()) { + setLoaded(other.getLoaded()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLoaded()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + loaded_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bool loaded = 1; + private boolean loaded_ ; + public boolean hasLoaded() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getLoaded() { + return loaded_; + } + public Builder setLoaded(boolean value) { + bitField0_ |= 0x00000001; + loaded_ = value; + onChanged(); + return this; + } + public Builder clearLoaded() { + bitField0_ = (bitField0_ & ~0x00000001); + loaded_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:SecureBulkLoadHFilesResponse) + } + + static { + defaultInstance = new SecureBulkLoadHFilesResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SecureBulkLoadHFilesResponse) + } + + public interface DelegationTokenProtoOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bytes identifier = 1; + boolean hasIdentifier(); + com.google.protobuf.ByteString getIdentifier(); + + // optional bytes password = 2; + boolean hasPassword(); + com.google.protobuf.ByteString getPassword(); + + // optional string kind = 3; + boolean hasKind(); + String getKind(); + + // optional string service = 4; + boolean hasService(); + String getService(); + } + public static final class DelegationTokenProto extends + com.google.protobuf.GeneratedMessage + implements 
DelegationTokenProtoOrBuilder { + // Use DelegationTokenProto.newBuilder() to construct. + private DelegationTokenProto(Builder builder) { + super(builder); + } + private DelegationTokenProto(boolean noInit) {} + + private static final DelegationTokenProto defaultInstance; + public static DelegationTokenProto getDefaultInstance() { + return defaultInstance; + } + + public DelegationTokenProto getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_fieldAccessorTable; + } + + private int bitField0_; + // optional bytes identifier = 1; + public static final int IDENTIFIER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString identifier_; + public boolean hasIdentifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getIdentifier() { + return identifier_; + } + + // optional bytes password = 2; + public static final int PASSWORD_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString password_; + public boolean hasPassword() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getPassword() { + return password_; + } + + // optional string kind = 3; + public static final int KIND_FIELD_NUMBER = 3; + private java.lang.Object kind_; + public boolean hasKind() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getKind() { + java.lang.Object ref = kind_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + kind_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getKindBytes() { + java.lang.Object ref = kind_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + kind_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string service = 4; + public static final int SERVICE_FIELD_NUMBER = 4; + private java.lang.Object service_; + public boolean hasService() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public String getService() { + java.lang.Object ref = service_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + service_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getServiceBytes() { + java.lang.Object ref = service_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + service_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + identifier_ = com.google.protobuf.ByteString.EMPTY; + password_ = com.google.protobuf.ByteString.EMPTY; + kind_ = ""; + service_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = 
memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, identifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, password_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getKindBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, getServiceBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, identifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, password_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getKindBytes()); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, getServiceBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto) obj; + + boolean result = true; + result = result && (hasIdentifier() == other.hasIdentifier()); + if (hasIdentifier()) { + result = result && getIdentifier() + .equals(other.getIdentifier()); + } + result = result && (hasPassword() == other.hasPassword()); + if (hasPassword()) { + result = result && getPassword() + .equals(other.getPassword()); + } + result = result && (hasKind() == other.hasKind()); + if (hasKind()) { + result = result && getKind() + .equals(other.getKind()); + } + result = result && (hasService() == other.hasService()); + if (hasService()) { + result = result && getService() + .equals(other.getService()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasIdentifier()) { + hash = (37 * hash) + IDENTIFIER_FIELD_NUMBER; + hash = (53 * hash) + getIdentifier().hashCode(); + } + if (hasPassword()) { + hash = (37 * hash) + PASSWORD_FIELD_NUMBER; + hash = (53 * hash) + getPassword().hashCode(); + } + if (hasKind()) { + hash = (37 * hash) + KIND_FIELD_NUMBER; + hash = (53 * hash) + getKind().hashCode(); + } + if (hasService()) { + hash = (37 * hash) + SERVICE_FIELD_NUMBER; + hash = (53 * hash) + getService().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + 
public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder 
builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProtoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_DelegationTokenProto_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + identifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + password_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + kind_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + service_ = ""; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto build() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.identifier_ = identifier_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.password_ = password_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.kind_ 
= kind_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.service_ = service_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.getDefaultInstance()) return this; + if (other.hasIdentifier()) { + setIdentifier(other.getIdentifier()); + } + if (other.hasPassword()) { + setPassword(other.getPassword()); + } + if (other.hasKind()) { + setKind(other.getKind()); + } + if (other.hasService()) { + setService(other.getService()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + identifier_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + password_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + kind_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + service_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // optional bytes identifier = 1; + private com.google.protobuf.ByteString identifier_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasIdentifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getIdentifier() { + return identifier_; + } + public Builder setIdentifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + identifier_ = value; + onChanged(); + return this; + } + public Builder clearIdentifier() { + bitField0_ = (bitField0_ & ~0x00000001); + identifier_ = getDefaultInstance().getIdentifier(); + onChanged(); + return this; + } + + // optional bytes password = 2; + private com.google.protobuf.ByteString password_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasPassword() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getPassword() { + return password_; + } + public Builder setPassword(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + password_ = value; + onChanged(); + return this; + } + public Builder clearPassword() { + bitField0_ = (bitField0_ & ~0x00000002); + password_ = 
getDefaultInstance().getPassword(); + onChanged(); + return this; + } + + // optional string kind = 3; + private java.lang.Object kind_ = ""; + public boolean hasKind() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getKind() { + java.lang.Object ref = kind_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + kind_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setKind(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + kind_ = value; + onChanged(); + return this; + } + public Builder clearKind() { + bitField0_ = (bitField0_ & ~0x00000004); + kind_ = getDefaultInstance().getKind(); + onChanged(); + return this; + } + void setKind(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000004; + kind_ = value; + onChanged(); + } + + // optional string service = 4; + private java.lang.Object service_ = ""; + public boolean hasService() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public String getService() { + java.lang.Object ref = service_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + service_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setService(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + service_ = value; + onChanged(); + return this; + } + public Builder clearService() { + bitField0_ = (bitField0_ & ~0x00000008); + service_ = getDefaultInstance().getService(); + onChanged(); + return this; + } + void setService(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000008; + service_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:DelegationTokenProto) + } + + static { + defaultInstance = new DelegationTokenProto(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DelegationTokenProto) + } + + public interface PrepareBulkLoadRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + } + public static final class PrepareBulkLoadRequest extends + com.google.protobuf.GeneratedMessage + implements PrepareBulkLoadRequestOrBuilder { + // Use PrepareBulkLoadRequest.newBuilder() to construct. 
+ private PrepareBulkLoadRequest(Builder builder) { + super(builder); + } + private PrepareBulkLoadRequest(boolean noInit) {} + + private static final PrepareBulkLoadRequest defaultInstance; + public static PrepareBulkLoadRequest getDefaultInstance() { + return defaultInstance; + } + + public PrepareBulkLoadRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadRequest_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + 
Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest build() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest)other); + } else { + super.mergeFrom(other); + 
return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:PrepareBulkLoadRequest) + } + + static { + defaultInstance = new PrepareBulkLoadRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:PrepareBulkLoadRequest) + } + + public interface PrepareBulkLoadResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string bulkToken = 1; + boolean hasBulkToken(); + String getBulkToken(); + } + public static final class PrepareBulkLoadResponse extends + com.google.protobuf.GeneratedMessage + implements PrepareBulkLoadResponseOrBuilder { + // Use PrepareBulkLoadResponse.newBuilder() to construct. 
+ private PrepareBulkLoadResponse(Builder builder) { + super(builder); + } + private PrepareBulkLoadResponse(boolean noInit) {} + + private static final PrepareBulkLoadResponse defaultInstance; + public static PrepareBulkLoadResponse getDefaultInstance() { + return defaultInstance; + } + + public PrepareBulkLoadResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadResponse_fieldAccessorTable; + } + + private int bitField0_; + // required string bulkToken = 1; + public static final int BULKTOKEN_FIELD_NUMBER = 1; + private java.lang.Object bulkToken_; + public boolean hasBulkToken() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + bulkToken_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getBulkTokenBytes() { + java.lang.Object ref = bulkToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + bulkToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + bulkToken_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasBulkToken()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getBulkTokenBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getBulkTokenBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse) obj; + + boolean result = true; + result = result && (hasBulkToken() == other.hasBulkToken()); + if 
(hasBulkToken()) { + result = result && getBulkToken() + .equals(other.getBulkToken()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasBulkToken()) { + hash = (37 * hash) + BULKTOKEN_FIELD_NUMBER; + hash = (53 * hash) + getBulkToken().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, 
extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_PrepareBulkLoadResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + bulkToken_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse build() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if 
(((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.bulkToken_ = bulkToken_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance()) return this; + if (other.hasBulkToken()) { + setBulkToken(other.getBulkToken()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasBulkToken()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + bulkToken_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string bulkToken = 1; + private java.lang.Object bulkToken_ = ""; + public boolean hasBulkToken() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + bulkToken_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setBulkToken(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + bulkToken_ = value; + onChanged(); + return this; + } + public Builder clearBulkToken() { + bitField0_ = (bitField0_ & ~0x00000001); + bulkToken_ = getDefaultInstance().getBulkToken(); + onChanged(); + return this; + } + void setBulkToken(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + bulkToken_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:PrepareBulkLoadResponse) + } + + static { + defaultInstance = new PrepareBulkLoadResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:PrepareBulkLoadResponse) + } + + public interface CleanupBulkLoadRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string bulkToken = 1; + boolean hasBulkToken(); + String getBulkToken(); + } + public static final class CleanupBulkLoadRequest extends + com.google.protobuf.GeneratedMessage + implements CleanupBulkLoadRequestOrBuilder { + // Use CleanupBulkLoadRequest.newBuilder() to construct. 
+ private CleanupBulkLoadRequest(Builder builder) { + super(builder); + } + private CleanupBulkLoadRequest(boolean noInit) {} + + private static final CleanupBulkLoadRequest defaultInstance; + public static CleanupBulkLoadRequest getDefaultInstance() { + return defaultInstance; + } + + public CleanupBulkLoadRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadRequest_fieldAccessorTable; + } + + private int bitField0_; + // required string bulkToken = 1; + public static final int BULKTOKEN_FIELD_NUMBER = 1; + private java.lang.Object bulkToken_; + public boolean hasBulkToken() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + bulkToken_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getBulkTokenBytes() { + java.lang.Object ref = bulkToken_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + bulkToken_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + bulkToken_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasBulkToken()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getBulkTokenBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getBulkTokenBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest) obj; + + boolean result = true; + result = result && (hasBulkToken() == other.hasBulkToken()); + if (hasBulkToken()) { + 
result = result && getBulkToken() + .equals(other.getBulkToken()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasBulkToken()) { + hash = (37 * hash) + BULKTOKEN_FIELD_NUMBER; + hash = (53 * hash) + getBulkToken().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + 
.buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + bulkToken_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest build() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 
0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.bulkToken_ = bulkToken_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.getDefaultInstance()) return this; + if (other.hasBulkToken()) { + setBulkToken(other.getBulkToken()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasBulkToken()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + bulkToken_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string bulkToken = 1; + private java.lang.Object bulkToken_ = ""; + public boolean hasBulkToken() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getBulkToken() { + java.lang.Object ref = bulkToken_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + bulkToken_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setBulkToken(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + bulkToken_ = value; + onChanged(); + return this; + } + public Builder clearBulkToken() { + bitField0_ = (bitField0_ & ~0x00000001); + bulkToken_ = getDefaultInstance().getBulkToken(); + onChanged(); + return this; + } + void setBulkToken(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + bulkToken_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:CleanupBulkLoadRequest) + } + + static { + defaultInstance = new CleanupBulkLoadRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CleanupBulkLoadRequest) + } + + public interface CleanupBulkLoadResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class CleanupBulkLoadResponse extends + com.google.protobuf.GeneratedMessage + implements CleanupBulkLoadResponseOrBuilder { + // Use CleanupBulkLoadResponse.newBuilder() to construct. 
+ private CleanupBulkLoadResponse(Builder builder) { + super(builder); + } + private CleanupBulkLoadResponse(boolean noInit) {} + + private static final CleanupBulkLoadResponse defaultInstance; + public static CleanupBulkLoadResponse getDefaultInstance() { + return defaultInstance; + } + + public CleanupBulkLoadResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_CleanupBulkLoadResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + 
maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse build() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse result = new org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:CleanupBulkLoadResponse) + } + + static { + defaultInstance = new CleanupBulkLoadResponse(true); + defaultInstance.initFields(); + } + + // 
@@protoc_insertion_point(class_scope:CleanupBulkLoadResponse) + } + + public static abstract class SecureBulkLoadService + implements com.google.protobuf.Service { + protected SecureBulkLoadService() {} + + public interface Interface { + public abstract void prepareBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void secureBulkLoadHFiles( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void cleanupBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new SecureBulkLoadService() { + @java.lang.Override + public void prepareBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request, + com.google.protobuf.RpcCallback done) { + impl.prepareBulkLoad(controller, request, done); + } + + @java.lang.Override + public void secureBulkLoadHFiles( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request, + com.google.protobuf.RpcCallback done) { + impl.secureBulkLoadHFiles(controller, request, done); + } + + @java.lang.Override + public void cleanupBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request, + com.google.protobuf.RpcCallback done) { + impl.cleanupBulkLoad(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest)request); + case 1: + return impl.secureBulkLoadHFiles(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)request); + case 2: + return impl.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() 
given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void prepareBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void secureBulkLoadHFiles( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void cleanupBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.prepareBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.secureBulkLoadHFiles(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.cleanupBulkLoad(controller, (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new 
java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void prepareBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.class, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance())); + } + + public void secureBulkLoadHFiles( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.class, + 
org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance())); + } + + public void cleanupBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.class, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse prepareBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse secureBulkLoadHFiles( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse cleanupBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse prepareBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse secureBulkLoadHFiles( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse 
cleanupBulkLoad( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SecureBulkLoadHFilesRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SecureBulkLoadHFilesRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SecureBulkLoadHFilesResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SecureBulkLoadHFilesResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DelegationTokenProto_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DelegationTokenProto_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_PrepareBulkLoadRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_PrepareBulkLoadRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_PrepareBulkLoadResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_PrepareBulkLoadResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CleanupBulkLoadRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CleanupBulkLoadRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CleanupBulkLoadResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CleanupBulkLoadResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\024SecureBulkLoad.proto\032\013hbase.proto\032\014Cli" + + "ent.proto\"\244\001\n\033SecureBulkLoadHFilesReques" + + "t\0224\n\nfamilyPath\030\001 \003(\0132 .BulkLoadHFileReq" + + "uest.FamilyPath\022\024\n\014assignSeqNum\030\002 \001(\010\022&\n" + + "\007fsToken\030\003 \002(\0132\025.DelegationTokenProto\022\021\n" + + "\tbulkToken\030\004 \002(\t\".\n\034SecureBulkLoadHFiles" + + "Response\022\016\n\006loaded\030\001 \002(\010\"[\n\024DelegationTo" + + "kenProto\022\022\n\nidentifier\030\001 \001(\014\022\020\n\010password" + + "\030\002 \001(\014\022\014\n\004kind\030\003 \001(\t\022\017\n\007service\030\004 \001(\t\"+\n" + + "\026PrepareBulkLoadRequest\022\021\n\ttableName\030\001 \002", + "(\014\",\n\027PrepareBulkLoadResponse\022\021\n\tbulkTok" + + "en\030\001 \002(\t\"+\n\026CleanupBulkLoadRequest\022\021\n\tbu" + + "lkToken\030\001 \002(\t\"\031\n\027CleanupBulkLoadResponse" + + "2\370\001\n\025SecureBulkLoadService\022D\n\017prepareBul" + + 
"kLoad\022\027.PrepareBulkLoadRequest\032\030.Prepare" + + "BulkLoadResponse\022S\n\024secureBulkLoadHFiles" + + "\022\034.SecureBulkLoadHFilesRequest\032\035.SecureB" + + "ulkLoadHFilesResponse\022D\n\017cleanupBulkLoad" + + "\022\027.CleanupBulkLoadRequest\032\030.CleanupBulkL" + + "oadResponseBJ\n*org.apache.hadoop.hbase.p", + "rotobuf.generatedB\024SecureBulkLoadProtosH" + + "\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_SecureBulkLoadHFilesRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_SecureBulkLoadHFilesRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SecureBulkLoadHFilesRequest_descriptor, + new java.lang.String[] { "FamilyPath", "AssignSeqNum", "FsToken", "BulkToken", }, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.class, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.Builder.class); + internal_static_SecureBulkLoadHFilesResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_SecureBulkLoadHFilesResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SecureBulkLoadHFilesResponse_descriptor, + new java.lang.String[] { "Loaded", }, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.class, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.Builder.class); + internal_static_DelegationTokenProto_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_DelegationTokenProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DelegationTokenProto_descriptor, + new java.lang.String[] { "Identifier", "Password", "Kind", "Service", }, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.class, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.DelegationTokenProto.Builder.class); + internal_static_PrepareBulkLoadRequest_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_PrepareBulkLoadRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_PrepareBulkLoadRequest_descriptor, + new java.lang.String[] { "TableName", }, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.class, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest.Builder.class); + internal_static_PrepareBulkLoadResponse_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_PrepareBulkLoadResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_PrepareBulkLoadResponse_descriptor, + new java.lang.String[] { "BulkToken", }, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.class, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse.Builder.class); + internal_static_CleanupBulkLoadRequest_descriptor = + getDescriptor().getMessageTypes().get(5); + 
internal_static_CleanupBulkLoadRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CleanupBulkLoadRequest_descriptor, + new java.lang.String[] { "BulkToken", }, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.class, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest.Builder.class); + internal_static_CleanupBulkLoadResponse_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_CleanupBulkLoadResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CleanupBulkLoadResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.class, + org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hbase-protocol/src/main/protobuf/SecureBulkLoad.proto b/hbase-protocol/src/main/protobuf/SecureBulkLoad.proto new file mode 100644 index 00000000000..40fe789dea9 --- /dev/null +++ b/hbase-protocol/src/main/protobuf/SecureBulkLoad.proto @@ -0,0 +1,71 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "SecureBulkLoadProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import 'hbase.proto'; +import 'Client.proto'; + +message SecureBulkLoadHFilesRequest { + repeated BulkLoadHFileRequest.FamilyPath familyPath = 1; + optional bool assignSeqNum = 2; + required DelegationTokenProto fsToken = 3; + required string bulkToken = 4; +} + +message SecureBulkLoadHFilesResponse { + required bool loaded = 1; +} + +message DelegationTokenProto { + optional bytes identifier = 1; + optional bytes password = 2; + optional string kind = 3; + optional string service = 4; +} + +message PrepareBulkLoadRequest { + required bytes tableName = 1; +} + +message PrepareBulkLoadResponse { + required string bulkToken = 1; +} + +message CleanupBulkLoadRequest { + required string bulkToken = 1; + +} + +message CleanupBulkLoadResponse { +} + +service SecureBulkLoadService { + rpc prepareBulkLoad(PrepareBulkLoadRequest) + returns (PrepareBulkLoadResponse); + + rpc secureBulkLoadHFiles(SecureBulkLoadHFilesRequest) + returns (SecureBulkLoadHFilesResponse); + + rpc cleanupBulkLoad(CleanupBulkLoadRequest) + returns (CleanupBulkLoadResponse); +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java new file mode 100644 index 00000000000..07d55587474 --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java @@ -0,0 +1,175 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.client.coprocessor; + +import com.google.protobuf.ByteString; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.ipc.BlockingRpcCallback; +import org.apache.hadoop.hbase.ipc.ServerRpcController; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos; +import org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.security.token.Token; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * Client proxy for SecureBulkLoadProtocol + * used in conjunction with SecureBulkLoadEndpoint + */ +public class SecureBulkLoadClient { + private HTable table; + + public SecureBulkLoadClient(HTable table) { + this.table = table; + } + + public String prepareBulkLoad(final byte[] tableName) throws IOException { + try { + return + table.coprocessorService(SecureBulkLoadProtos.SecureBulkLoadService.class, + HConstants.EMPTY_START_ROW, + HConstants.EMPTY_START_ROW, + new Batch.Call() { + @Override + public String call(SecureBulkLoadProtos.SecureBulkLoadService instance) throws IOException { + ServerRpcController controller = new ServerRpcController(); + + BlockingRpcCallback rpcCallback = + new BlockingRpcCallback(); + + SecureBulkLoadProtos.PrepareBulkLoadRequest request = + SecureBulkLoadProtos.PrepareBulkLoadRequest.newBuilder() + .setTableName(com.google.protobuf.ByteString.copyFrom(tableName)).build(); + + instance.prepareBulkLoad(controller, + request, + rpcCallback); + + SecureBulkLoadProtos.PrepareBulkLoadResponse response = rpcCallback.get(); + if (controller.failedOnException()) { + throw controller.getFailedOn(); + } + return response.getBulkToken(); + } + }).entrySet().iterator().next().getValue(); + } catch (Throwable throwable) { + throw new IOException(throwable); + } + } + + public void cleanupBulkLoad(final String bulkToken) throws IOException { + try { + table.coprocessorService(SecureBulkLoadProtos.SecureBulkLoadService.class, + HConstants.EMPTY_START_ROW, + HConstants.EMPTY_START_ROW, + new Batch.Call() { + + @Override + public String call(SecureBulkLoadProtos.SecureBulkLoadService instance) throws IOException { + ServerRpcController controller = new ServerRpcController(); + + BlockingRpcCallback rpcCallback = + new BlockingRpcCallback(); + + SecureBulkLoadProtos.CleanupBulkLoadRequest request = + SecureBulkLoadProtos.CleanupBulkLoadRequest.newBuilder() + .setBulkToken(bulkToken).build(); + + instance.cleanupBulkLoad(controller, + request, + rpcCallback); + + if (controller.failedOnException()) { + throw controller.getFailedOn(); + } + return null; + } + }); + } catch (Throwable throwable) { + throw new IOException(throwable); + } + } + + public boolean bulkLoadHFiles(final List> familyPaths, + final Token userToken, + final String bulkToken, + final byte[] startRow) throws IOException { + try { + return + table.coprocessorService(SecureBulkLoadProtos.SecureBulkLoadService.class, + startRow, + startRow, + new Batch.Call() { + + @Override + public Boolean call(SecureBulkLoadProtos.SecureBulkLoadService instance) throws IOException { + SecureBulkLoadProtos.DelegationTokenProto protoDT = + SecureBulkLoadProtos.DelegationTokenProto.newBuilder().build(); + if(userToken != null) { + protoDT = + SecureBulkLoadProtos.DelegationTokenProto.newBuilder() + 
.setIdentifier(ByteString.copyFrom(userToken.getIdentifier())) + .setPassword(ByteString.copyFrom(userToken.getPassword())) + .setKind(userToken.getKind().toString()) + .setService(userToken.getService().toString()).build(); + } + + List protoFamilyPaths = + new ArrayList(); + for(Pair el: familyPaths) { + protoFamilyPaths.add(ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder() + .setFamily(ByteString.copyFrom(el.getFirst())) + .setPath(el.getSecond()).build()); + } + + SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request = + SecureBulkLoadProtos.SecureBulkLoadHFilesRequest.newBuilder() + .setFsToken(protoDT) + .addAllFamilyPath(protoFamilyPaths) + .setBulkToken(bulkToken).build(); + + ServerRpcController controller = new ServerRpcController(); + BlockingRpcCallback rpcCallback = + new BlockingRpcCallback(); + instance.secureBulkLoadHFiles(controller, + request, + rpcCallback); + + SecureBulkLoadProtos.SecureBulkLoadHFilesResponse response = rpcCallback.get(); + if (controller.failedOnException()) { + throw controller.getFailedOn(); + } + return response.getLoaded(); + } + }).entrySet().iterator().next().getValue(); + } catch (Throwable throwable) { + throw new IOException(throwable); + } + } + + public Path getStagingPath(String bulkToken, byte[] family) throws IOException { + return SecureBulkLoadEndpoint.getStagingPath(table.getConfiguration(), bulkToken, family); + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java index c703b9ea75a..8afbd05626b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java @@ -61,6 +61,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.ServerCallable; +import org.apache.hadoop.hbase.client.coprocessor.SecureBulkLoadClient; import org.apache.hadoop.hbase.io.HalfStoreFileReader; import org.apache.hadoop.hbase.io.Reference; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; @@ -74,8 +75,10 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.StoreFile; +import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; @@ -100,10 +103,21 @@ public class LoadIncrementalHFiles extends Configured implements Tool { private static final String ASSIGN_SEQ_IDS = "hbase.mapreduce.bulkload.assign.sequenceNumbers"; private boolean assignSeqIds; - public LoadIncrementalHFiles(Configuration conf) throws Exception { + private boolean useSecure; + private Token userToken; + private String bulkToken; + + //package private for testing + LoadIncrementalHFiles(Configuration conf, Boolean useSecure) throws Exception { super(conf); this.cfg = conf; this.hbAdmin = new HBaseAdmin(conf); + //added simple for testing + this.useSecure = useSecure != null ? 
useSecure : User.isHBaseSecurityEnabled(conf); + } + + public LoadIncrementalHFiles(Configuration conf) throws Exception { + this(conf, null); assignSeqIds = conf.getBoolean(ASSIGN_SEQ_IDS, true); } @@ -215,6 +229,18 @@ public class LoadIncrementalHFiles extends Configured implements Tool { return; } + //If using secure bulk load + //prepare staging directory and token + if(useSecure) { + FileSystem fs = FileSystem.get(cfg); + //This condition is here for unit testing + //Since delegation token doesn't work in mini cluster + if(User.isSecurityEnabled()) { + userToken = fs.getDelegationToken("renewer"); + } + bulkToken = new SecureBulkLoadClient(table).prepareBulkLoad(table.getTableName()); + } + // Assumes that region splits can happen while this occurs. while (!queue.isEmpty()) { // need to reload split keys each iteration. @@ -243,6 +269,18 @@ public class LoadIncrementalHFiles extends Configured implements Tool { } } finally { + if(useSecure) { + if(userToken != null) { + try { + userToken.cancel(cfg); + } catch (Exception e) { + LOG.warn("Failed to cancel HDFS delegation token.", e); + } + } + if(bulkToken != null) { + new SecureBulkLoadClient(table).cleanupBulkLoad(bulkToken); + } + } pool.shutdown(); if (queue != null && !queue.isEmpty()) { StringBuilder err = new StringBuilder(); @@ -476,11 +514,47 @@ public class LoadIncrementalHFiles extends Configured implements Tool { tableName, first) { @Override public Boolean call() throws Exception { - LOG.debug("Going to connect to server " + location + " for row " - + Bytes.toStringBinary(row)); - byte[] regionName = location.getRegionInfo().getRegionName(); - return ProtobufUtil.bulkLoadHFile(server, famPaths, regionName, - assignSeqIds); + SecureBulkLoadClient secureClient = null; + boolean success = false; + + try { + LOG.debug("Going to connect to server " + location + " for row " + + Bytes.toStringBinary(row)); + byte[] regionName = location.getRegionInfo().getRegionName(); + if(!useSecure) { + success = ProtobufUtil.bulkLoadHFile(server, famPaths, regionName, assignSeqIds); + } else { + HTable table = new HTable(conn.getConfiguration(), tableName); + secureClient = new SecureBulkLoadClient(table); + success = secureClient.bulkLoadHFiles(famPaths, userToken, bulkToken, location.getRegionInfo().getStartKey()); + } + return success; + } finally { + //Best effort copying of files that might not have been imported + //from the staging directory back to original location + //in user directory + if(secureClient != null && !success) { + FileSystem fs = FileSystem.get(cfg); + for(Pair el : famPaths) { + Path hfileStagingPath = null; + Path hfileOrigPath = new Path(el.getSecond()); + try { + hfileStagingPath= new Path(secureClient.getStagingPath(bulkToken, el.getFirst()), + hfileOrigPath.getName()); + if(fs.rename(hfileStagingPath, hfileOrigPath)) { + LOG.debug("Moved back file " + hfileOrigPath + " from " + + hfileStagingPath); + } else if(fs.exists(hfileStagingPath)){ + LOG.debug("Unable to move back file " + hfileOrigPath + " from " + + hfileStagingPath); + } + } catch(Exception ex) { + LOG.debug("Unable to move back file " + hfileOrigPath + " from " + + hfileStagingPath, ex); + } + } + } + } } }; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index c19add2b7b9..c0b449a62c3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -3254,17 +3254,25 @@ public class HRegion implements HeapSize { // , Writable{ return multipleFamilies; } + + public boolean bulkLoadHFiles(List> familyPaths, + boolean assignSeqId) throws IOException { + return bulkLoadHFiles(familyPaths, assignSeqId, null); + } + /** * Attempts to atomically load a group of hfiles. This is critical for loading * rows with multiple column families atomically. * * @param familyPaths List of Pair + * @param bulkLoadListener Internal hooks enabling massaging/preparation of a + * file about to be bulk loaded * @param assignSeqId * @return true if successful, false if failed recoverably * @throws IOException if failed unrecoverably. */ - public boolean bulkLoadHFiles(List> familyPaths, - boolean assignSeqId) throws IOException { + public boolean bulkLoadHFiles(List> familyPaths, boolean assignSeqId, + BulkLoadListener bulkLoadListener) throws IOException { Preconditions.checkNotNull(familyPaths); // we need writeLock for multi-family bulk load startBulkRegionOperation(hasMultipleColumnFamilies(familyPaths)); @@ -3324,7 +3332,14 @@ public class HRegion implements HeapSize { // , Writable{ String path = p.getSecond(); Store store = getStore(familyName); try { - store.bulkLoadHFile(path, assignSeqId ? this.log.obtainSeqNum() : -1); + String finalPath = path; + if(bulkLoadListener != null) { + finalPath = bulkLoadListener.prepareBulkLoad(familyName, path); + } + store.bulkLoadHFile(finalPath, assignSeqId ? this.log.obtainSeqNum() : -1); + if(bulkLoadListener != null) { + bulkLoadListener.doneBulkLoad(familyName, path); + } } catch (IOException ioe) { // A failure here can cause an atomicity violation that we currently // cannot recover from since it is likely a failed HDFS operation. @@ -3332,6 +3347,14 @@ public class HRegion implements HeapSize { // , Writable{ // TODO Need a better story for reverting partial failures due to HDFS. 
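+        // Note on the lines added below: before the IOException is rethrown, the BulkLoadListener
+        // (if one was supplied) is given a chance to undo its prepareBulkLoad() step, e.g. to move a
+        // staged file back to its source location; see SecureBulkLoadListener.failedBulkLoad() in this patch.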
LOG.error("There was a partial failure due to IO when attempting to" + " load " + Bytes.toString(p.getFirst()) + " : "+ p.getSecond(), ioe); + if(bulkLoadListener != null) { + try { + bulkLoadListener.failedBulkLoad(familyName, path); + } catch (Exception ex) { + LOG.error("Error while calling failedBulkLoad for family "+ + Bytes.toString(familyName)+" with path "+path, ex); + } + } throw ioe; } } @@ -5452,4 +5475,38 @@ public class HRegion implements HeapSize { // , Writable{ if (bc != null) bc.shutdown(); } } + + /** + * Listener class to enable callers of + * bulkLoadHFile() to perform any necessary + * pre/post processing of a given bulkload call + */ + public static interface BulkLoadListener { + + /** + * Called before an HFile is actually loaded + * @param family family being loaded to + * @param srcPath path of HFile + * @return final path to be used for actual loading + * @throws IOException + */ + String prepareBulkLoad(byte[] family, String srcPath) throws IOException; + + /** + * Called after a successful HFile load + * @param family family being loaded to + * @param srcPath path of HFile + * @throws IOException + */ + void doneBulkLoad(byte[] family, String srcPath) throws IOException; + + /** + * Called after a failed HFile load + * @param family family being loaded to + * @param srcPath path of HFile + * @throws IOException + */ + void failedBulkLoad(byte[] family, String srcPath) throws IOException; + + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java index 84052d4e946..8946757d1af 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java @@ -577,7 +577,11 @@ public class HStore implements Store, StoreConfiguration { // Copy the file if it's on another filesystem FileSystem srcFs = srcPath.getFileSystem(conf); FileSystem desFs = fs instanceof HFileSystem ? ((HFileSystem)fs).getBackingFs() : fs; - if (!srcFs.equals(desFs)) { + //We can't compare FileSystem instances as + //equals() includes UGI instance as part of the comparison + //and won't work when doing SecureBulkLoad + //TODO deal with viewFS + if (!srcFs.getUri().equals(desFs.getUri())) { LOG.info("Bulk-load file " + srcPath + " is on different filesystem than " + "the destination store. 
Copying file over to destination filesystem."); Path tmpPath = getTmpPath(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index d39c7df1809..4e3b065663e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -18,7 +18,9 @@ import java.io.IOException; import java.net.InetAddress; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; @@ -63,6 +65,7 @@ import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.access.Permission.Action; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; +import org.apache.hadoop.hbase.util.Pair; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ListMultimap; @@ -1005,6 +1008,76 @@ public class AccessController extends BaseRegionObserver } } + /** + * Verifies user has WRITE privileges on + * the Column Families involved in the bulkLoadHFile + * request. Specific Column Write privileges are presently + * ignored. + */ + @Override + public void preBulkLoadHFile(ObserverContext ctx, + List> familyPaths) throws IOException { + List cfs = new LinkedList(); + for(Pair el : familyPaths) { + requirePermission("preBulkLoadHFile", + ctx.getEnvironment().getRegion().getTableDesc().getName(), + el.getFirst(), + null, + Permission.Action.WRITE); + } + } + + private AuthResult hasSomeAccess(RegionCoprocessorEnvironment e, String method, Action action) throws IOException { + User requestUser = getActiveUser(); + byte[] tableName = e.getRegion().getTableDesc().getName(); + AuthResult authResult = permissionGranted(method, requestUser, + action, e, Collections.EMPTY_MAP); + if (!authResult.isAllowed()) { + for(UserPermission userPerm: + AccessControlLists.getUserPermissions(regionEnv.getConfiguration(), tableName)) { + for(Permission.Action userAction: userPerm.getActions()) { + if(userAction.equals(action)) { + return AuthResult.allow(method, "Access allowed", requestUser, + action, tableName, null, null); + } + } + } + } + return authResult; + } + + /** + * Authorization check for + * SecureBulkLoadProtocol.prepareBulkLoad() + * @param e + * @throws IOException + */ + //TODO this should end up as a coprocessor hook + public void prePrepareBulkLoad(RegionCoprocessorEnvironment e) throws IOException { + AuthResult authResult = hasSomeAccess(e, "prePrepareBulkLoad", Action.WRITE); + logResult(authResult); + if (!authResult.isAllowed()) { + throw new AccessDeniedException("Insufficient permissions (table=" + + e.getRegion().getTableDesc().getNameAsString() + ", action=WRITE)"); + } + } + + /** + * Authorization security check for + * SecureBulkLoadProtocol.cleanupBulkLoad() + * @param e + * @throws IOException + */ + //TODO this should end up as a coprocessor hook + public void preCleanupBulkLoad(RegionCoprocessorEnvironment e) throws IOException { + AuthResult authResult = hasSomeAccess(e, "preCleanupBulkLoad", Action.WRITE); + logResult(authResult); + if (!authResult.isAllowed()) { + throw new AccessDeniedException("Insufficient permissions (table=" + + e.getRegion().getTableDesc().getNameAsString() + ", action=WRITE)"); + } + } + 
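+  /*
+   * For reference, a rough sketch of how these checks are reached: SecureBulkLoadEndpoint (added
+   * elsewhere in this patch) looks up this coprocessor on the region and invokes the two hooks above
+   * before creating or deleting a staging directory, roughly:
+   *
+   *   AccessController ac = (AccessController) env.getRegion().getCoprocessorHost()
+   *       .findCoprocessor(AccessController.class.getName());
+   *   ac.prePrepareBulkLoad(env);   // before honouring a PrepareBulkLoadRequest
+   *   ac.preCleanupBulkLoad(env);   // before honouring a CleanupBulkLoadRequest
+   *
+   * Both hooks delegate to hasSomeAccess(..., Action.WRITE), so the requesting user needs WRITE
+   * access on the table being bulk loaded.
+   */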
/* ---- Protobuf AccessControlService implementation ---- */ @Override public void grant(RpcController controller, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java new file mode 100644 index 00000000000..bfef425530b --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java @@ -0,0 +1,368 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.security.access; + +import com.google.protobuf.RpcCallback; +import com.google.protobuf.RpcController; +import com.google.protobuf.Service; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.hbase.Coprocessor; +import org.apache.hadoop.hbase.CoprocessorEnvironment; +import org.apache.hadoop.hbase.DoNotRetryIOException; +import org.apache.hadoop.hbase.coprocessor.CoprocessorService; +import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; +import org.apache.hadoop.hbase.ipc.RequestContext; +import org.apache.hadoop.hbase.protobuf.ResponseConverter; +import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService; +import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse; +import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest; +import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse; +import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest; +import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse; +import org.apache.hadoop.hbase.regionserver.HRegion; +import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Methods; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.token.Token; + +import java.io.IOException; +import java.math.BigInteger; +import java.security.PrivilegedAction; +import java.security.SecureRandom; +import java.util.ArrayList; 
+import java.util.List; + +/** + * Coprocessor service for bulk loads in secure mode. + * This coprocessor has to be installed as part of enabling + * security in HBase. + * + * This service addresses two issues: + * + * 1. Moving files in a secure filesystem wherein the HBase Client + * and HBase Server are different filesystem users. + * 2. Does moving in a secure manner. Assuming that the filesystem + * is POSIX compliant. + * + * The algorithm is as follows: + * + * 1. Create an hbase owned staging directory which is + * world traversable (711): /hbase/staging + * 2. A user writes out data to his secure output directory: /user/foo/data + * 3. A call is made to hbase to create a secret staging directory + * which globally rwx (777): /user/staging/averylongandrandomdirectoryname + * 4. The user makes the data world readable and writable, then moves it + * into the random staging directory, then calls bulkLoadHFiles() + * + * Like delegation tokens the strength of the security lies in the length + * and randomness of the secret directory. + * + */ +@InterfaceAudience.Private +public class SecureBulkLoadEndpoint extends SecureBulkLoadService + implements CoprocessorService, Coprocessor { + + public static final long VERSION = 0L; + + //320/5 = 64 characters + private static final int RANDOM_WIDTH = 320; + private static final int RANDOM_RADIX = 32; + + private static Log LOG = LogFactory.getLog(SecureBulkLoadEndpoint.class); + + private final static FsPermission PERM_ALL_ACCESS = FsPermission.valueOf("-rwxrwxrwx"); + private final static FsPermission PERM_HIDDEN = FsPermission.valueOf("-rwx--x--x"); + private final static String BULKLOAD_STAGING_DIR = "hbase.bulkload.staging.dir"; + + private SecureRandom random; + private FileSystem fs; + private Configuration conf; + + //two levels so it doesn't get deleted accidentally + //no sticky bit in Hadoop 1.0 + private Path baseStagingDir; + + private RegionCoprocessorEnvironment env; + + + @Override + public void start(CoprocessorEnvironment env) { + this.env = (RegionCoprocessorEnvironment)env; + random = new SecureRandom(); + conf = env.getConfiguration(); + baseStagingDir = getBaseStagingDir(conf); + + try { + fs = FileSystem.get(conf); + fs.mkdirs(baseStagingDir, PERM_HIDDEN); + fs.setPermission(baseStagingDir, PERM_HIDDEN); + //no sticky bit in hadoop-1.0, making directory nonempty so it never gets erased + fs.mkdirs(new Path(baseStagingDir,"DONOTERASE"), PERM_HIDDEN); + FileStatus status = fs.getFileStatus(baseStagingDir); + if(status == null) { + throw new IllegalStateException("Failed to create staging directory"); + } + if(!status.getPermission().equals(PERM_HIDDEN)) { + throw new IllegalStateException( + "Directory already exists but permissions aren't set to '-rwx--x--x' "); + } + } catch (IOException e) { + throw new IllegalStateException("Failed to get FileSystem instance",e); + } + } + + @Override + public void stop(CoprocessorEnvironment env) throws IOException { + } + + @Override + public void prepareBulkLoad(RpcController controller, + PrepareBulkLoadRequest request, + RpcCallback done){ + try { + getAccessController().prePrepareBulkLoad(env); + String bulkToken = createStagingDir(baseStagingDir, + getActiveUser(), request.getTableName().toByteArray()).toString(); + done.run(PrepareBulkLoadResponse.newBuilder().setBulkToken(bulkToken).build()); + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); + } + done.run(null); + } + + @Override + public void cleanupBulkLoad(RpcController controller, + 
CleanupBulkLoadRequest request, + RpcCallback done) { + try { + getAccessController().preCleanupBulkLoad(env); + fs.delete(createStagingDir(baseStagingDir, + getActiveUser(), + env.getRegion().getTableDesc().getName(), + new Path(request.getBulkToken()).getName()), + true); + done.run(CleanupBulkLoadResponse.newBuilder().build()); + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); + } + done.run(null); + } + + @Override + public void secureBulkLoadHFiles(RpcController controller, + SecureBulkLoadHFilesRequest request, + RpcCallback done) { + final List> familyPaths = new ArrayList>(); + for(ClientProtos.BulkLoadHFileRequest.FamilyPath el : request.getFamilyPathList()) { + familyPaths.add(new Pair(el.getFamily().toByteArray(),el.getPath())); + } + final Token userToken = + new Token(request.getFsToken().getIdentifier().toByteArray(), + request.getFsToken().getPassword().toByteArray(), + new Text(request.getFsToken().getKind()), + new Text(request.getFsToken().getService())); + final String bulkToken = request.getBulkToken(); + User user = getActiveUser(); + final UserGroupInformation ugi = user.getUGI(); + if(userToken != null) { + ugi.addToken(userToken); + } else if(User.isSecurityEnabled()) { + //we allow this to pass through in "simple" security mode + //for mini cluster testing + ResponseConverter.setControllerException(controller, + new DoNotRetryIOException("User token cannot be null")); + return; + } + + HRegion region = env.getRegion(); + boolean bypass = false; + if (region.getCoprocessorHost() != null) { + try { + bypass = region.getCoprocessorHost().preBulkLoadHFile(familyPaths); + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); + done.run(null); + return; + } + } + boolean loaded = false; + if (!bypass) { + loaded = ugi.doAs(new PrivilegedAction() { + @Override + public Boolean run() { + FileSystem fs = null; + try { + Configuration conf = env.getConfiguration(); + fs = FileSystem.get(conf); + for(Pair el: familyPaths) { + Path p = new Path(el.getSecond()); + LOG.trace("Setting permission for: " + p); + fs.setPermission(p, PERM_ALL_ACCESS); + Path stageFamily = new Path(bulkToken, Bytes.toString(el.getFirst())); + if(!fs.exists(stageFamily)) { + fs.mkdirs(stageFamily); + fs.setPermission(stageFamily, PERM_ALL_ACCESS); + } + } + //We call bulkLoadHFiles as requesting user + //To enable access prior to staging + return env.getRegion().bulkLoadHFiles(familyPaths, true, + new SecureBulkLoadListener(fs, bulkToken)); + } catch (Exception e) { + LOG.error("Failed to complete bulk load", e); + } + return false; + } + }); + } + if (region.getCoprocessorHost() != null) { + try { + loaded = region.getCoprocessorHost().postBulkLoadHFile(familyPaths, loaded); + } catch (IOException e) { + ResponseConverter.setControllerException(controller, e); + done.run(null); + return; + } + } + done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(loaded).build()); + } + + private AccessController getAccessController() { + return (AccessController) this.env.getRegion() + .getCoprocessorHost().findCoprocessor(AccessController.class.getName()); + } + + private Path createStagingDir(Path baseDir, User user, byte[] tableName) throws IOException { + String randomDir = user.getShortName()+"__"+Bytes.toString(tableName)+"__"+ + (new BigInteger(RANDOM_WIDTH, random).toString(RANDOM_RADIX)); + return createStagingDir(baseDir, user, tableName, randomDir); + } + + private Path createStagingDir(Path baseDir, + User user, + byte[] 
+  private Path createStagingDir(Path baseDir, User user, byte[] tableName) throws IOException {
+    String randomDir = user.getShortName()+"__"+Bytes.toString(tableName)+"__"+
+        (new BigInteger(RANDOM_WIDTH, random).toString(RANDOM_RADIX));
+    return createStagingDir(baseDir, user, tableName, randomDir);
+  }
+
+  private Path createStagingDir(Path baseDir,
+                                User user,
+                                byte[] tableName,
+                                String randomDir) throws IOException {
+    Path p = new Path(baseDir, randomDir);
+    fs.mkdirs(p, PERM_ALL_ACCESS);
+    fs.setPermission(p, PERM_ALL_ACCESS);
+    return p;
+  }
+
+  private User getActiveUser() {
+    User user = RequestContext.getRequestUser();
+    if (!RequestContext.isInRequestContext()) {
+      return null;
+    }
+
+    //this is for testing
+    if("simple".equalsIgnoreCase(conf.get(User.HBASE_SECURITY_CONF_KEY))) {
+      return User.createUserForTesting(conf, user.getShortName(), new String[]{});
+    }
+
+    return user;
+  }
+
+  /**
+   * Returns the staging path for a given column family.
+   * This is needed for clean recovery and is called reflectively in LoadIncrementalHFiles.
+   */
+  public static Path getStagingPath(Configuration conf, String bulkToken, byte[] family) {
+    Path stageP = new Path(getBaseStagingDir(conf), bulkToken);
+    return new Path(stageP, Bytes.toString(family));
+  }
+
+  private static Path getBaseStagingDir(Configuration conf) {
+    return new Path(conf.get(BULKLOAD_STAGING_DIR, "/tmp/hbase-staging"));
+  }
+
+  @Override
+  public Service getService() {
+    return this;
+  }
+
+  private static class SecureBulkLoadListener implements HRegion.BulkLoadListener {
+    private FileSystem fs;
+    private String stagingDir;
+
+    public SecureBulkLoadListener(FileSystem fs, String stagingDir) {
+      this.fs = fs;
+      this.stagingDir = stagingDir;
+    }
+
+    @Override
+    public String prepareBulkLoad(final byte[] family, final String srcPath) throws IOException {
+      Path p = new Path(srcPath);
+      Path stageP = new Path(stagingDir, new Path(Bytes.toString(family), p.getName()));
+
+      if(!isFile(p)) {
+        throw new IOException("Path does not reference a file: " + p);
+      }
+
+      LOG.debug("Moving " + p + " to " + stageP);
+      if(!fs.rename(p, stageP)) {
+        throw new IOException("Failed to move HFile: " + p + " to " + stageP);
+      }
+      return stageP.toString();
+    }
+
+    @Override
+    public void doneBulkLoad(byte[] family, String srcPath) throws IOException {
+      LOG.debug("Bulk Load done for: " + srcPath);
+    }
+
+    @Override
+    public void failedBulkLoad(final byte[] family, final String srcPath) throws IOException {
+      Path p = new Path(srcPath);
+      Path stageP = new Path(stagingDir,
+          new Path(Bytes.toString(family), p.getName()));
+      LOG.debug("Moving " + stageP + " back to " + p);
+      if(!fs.rename(stageP, p)) {
+        throw new IOException("Failed to move HFile: " + stageP + " to " + p);
+      }
+    }
+
+    /**
+     * Check if the path is referencing a file.
+     * This is mainly needed to avoid symlinks.
+ * @param p + * @return true if the p is a file + * @throws IOException + */ + private boolean isFile(Path p) throws IOException { + FileStatus status = fs.getFileStatus(p); + boolean isFile = !status.isDir(); + try { + isFile = isFile && !(Boolean)Methods.call(FileStatus.class, status, "isSymlink", null, null); + } catch (Exception e) { + } + return isFile; + } + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java index a88c415cb61..7b27cf5c7fb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java @@ -62,7 +62,9 @@ public class TestLoadIncrementalHFiles { public static String COMPRESSION = Compression.Algorithm.NONE.getName(); - private static HBaseTestingUtility util = new HBaseTestingUtility(); + static HBaseTestingUtility util = new HBaseTestingUtility(); + //used by secure subclass + static boolean useSecure = false; @BeforeClass public static void setUpBeforeClass() throws Exception { @@ -151,8 +153,7 @@ public class TestLoadIncrementalHFiles { HTable table = new HTable(util.getConfiguration(), TABLE); util.waitTableAvailable(TABLE, 30000); - LoadIncrementalHFiles loader = new LoadIncrementalHFiles( - util.getConfiguration()); + LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration(), useSecure); loader.doBulkLoad(dir, table); assertEquals(expectedRows, util.countRows(table)); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java index f3d76e200ef..e4937f7a416 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java @@ -71,7 +71,9 @@ import com.google.protobuf.ServiceException; public class TestLoadIncrementalHFilesSplitRecovery { final static Log LOG = LogFactory.getLog(TestHRegionServerBulkLoad.class); - private static HBaseTestingUtility util; + static HBaseTestingUtility util; + //used by secure subclass + static boolean useSecure = false; final static int NUM_CFS = 10; final static byte[] QUAL = Bytes.toBytes("qual"); @@ -138,8 +140,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { */ private void populateTable(String table, int value) throws Exception { // create HFiles for different column families - LoadIncrementalHFiles lih = new LoadIncrementalHFiles( - util.getConfiguration()); + LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration(), useSecure); Path bulk1 = buildBulkFiles(table, value); HTable t = new HTable(util.getConfiguration(), Bytes.toBytes(table)); lih.doBulkLoad(bulk1, t); @@ -231,7 +232,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { final AtomicInteger attmptedCalls = new AtomicInteger(); final AtomicInteger failedCalls = new AtomicInteger(); LoadIncrementalHFiles lih = new LoadIncrementalHFiles( - util.getConfiguration()) { + util.getConfiguration(), useSecure) { protected List tryAtomicRegionLoad(final HConnection conn, byte[] tableName, final byte[] first, Collection lqis) @@ -299,7 +300,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { // files to 
fail when attempt to atomically import. This is recoverable. final AtomicInteger attemptedCalls = new AtomicInteger(); LoadIncrementalHFiles lih2 = new LoadIncrementalHFiles( - util.getConfiguration()) { + util.getConfiguration(), useSecure) { protected void bulkLoadPhase(final HTable htable, final HConnection conn, ExecutorService pool, Deque queue, @@ -340,7 +341,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { final AtomicInteger countedLqis= new AtomicInteger(); LoadIncrementalHFiles lih = new LoadIncrementalHFiles( - util.getConfiguration()) { + util.getConfiguration(), useSecure) { protected List groupOrSplit( Multimap regionGroups, final LoadQueueItem item, final HTable htable, @@ -372,7 +373,7 @@ public class TestLoadIncrementalHFilesSplitRecovery { setupTable(table, 10); LoadIncrementalHFiles lih = new LoadIncrementalHFiles( - util.getConfiguration()) { + util.getConfiguration(), useSecure) { int i = 0; protected List groupOrSplit( diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java new file mode 100644 index 00000000000..e955ebf4db8 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java @@ -0,0 +1,56 @@ +/** + * Copyright The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.mapreduce; + +import org.apache.hadoop.hbase.LargeTests; +import org.apache.hadoop.hbase.security.access.AccessControlLists; +import org.apache.hadoop.hbase.security.access.SecureTestUtil; + +import org.junit.BeforeClass; +import org.junit.experimental.categories.Category; + +/** + * Reruns TestLoadIncrementalHFiles using LoadIncrementalHFiles in secure mode. + * This suite is unable to verify the security handoff/turnover + * as miniCluster is running as system user thus has root privileges + * and delegation tokens don't seem to work on miniDFS. + * + * Thus SecureBulkload can only be completely verified by running + * integration tests against a secure cluster. This suite is still + * invaluable as it verifies the other mechanisms that need to be + * supported as part of a LoadIncrementalFiles call. 
+ */
+@Category(LargeTests.class)
+public class TestSecureLoadIncrementalHFiles extends TestLoadIncrementalHFiles {
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    useSecure = true;
+    // setup configuration
+    SecureTestUtil.enableSecurity(util.getConfiguration());
+
+    util.startMiniCluster();
+
+    // Wait for the ACL table to become available
+    util.waitTableAvailable(AccessControlLists.ACL_TABLE_NAME, 5000);
+  }
+
+}
+
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java
new file mode 100644
index 00000000000..e8593b2cdce
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.mapreduce;
+
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.LargeTests;
+import org.apache.hadoop.hbase.security.access.AccessControlLists;
+import org.apache.hadoop.hbase.security.access.SecureTestUtil;
+
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+
+/**
+ * Reruns TestLoadIncrementalHFilesSplitRecovery
+ * using LoadIncrementalHFiles in secure mode.
+ * This suite is unable to verify the security handoff/turnover,
+ * as the mini cluster runs as the system user and thus has root
+ * privileges, and delegation tokens don't seem to work on miniDFS.
+ *
+ * Thus SecureBulkLoad can only be completely verified by running
+ * integration tests against a secure cluster. This suite is still
+ * invaluable as it verifies the other mechanisms that need to be
+ * supported as part of a LoadIncrementalHFiles call.
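+ *
+ * For reference, a rough sketch of the wiring this secure rerun relies on
+ * (illustrative only; it mirrors SecureTestUtil.enableSecurity() and the
+ * useSecure flag consumed by the parent test's loader calls):
+ *
+ * <pre>
+ *   conf.set("hbase.coprocessor.region.classes",
+ *       AccessController.class.getName() + "," + SecureBulkLoadEndpoint.class.getName());
+ *   new LoadIncrementalHFiles(conf, useSecure).doBulkLoad(dir, table);
+ * </pre>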
+ */ +@Category(LargeTests.class) +public class TestSecureLoadIncrementalHFilesSplitRecovery extends TestLoadIncrementalHFilesSplitRecovery { + + //This "overrides" the parent static method + //make sure they are in sync + @BeforeClass + public static void setupCluster() throws Exception { + useSecure = true; + util = new HBaseTestingUtility(); + // setup configuration + SecureTestUtil.enableSecurity(util.getConfiguration()); + + util.startMiniCluster(); + + // Wait for the ACL table to become available + util.waitTableAvailable(AccessControlLists.ACL_TABLE_NAME, 5000); + } + + //Disabling this test as it does not work in secure mode + @Test + @Override + public void testBulkLoadPhaseFailure() { + } +} + diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java index 9d85194954a..f069766b8a0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java @@ -31,7 +31,8 @@ public class SecureTestUtil { conf.set("hadoop.security.authorization", "false"); conf.set("hadoop.security.authentication", "simple"); conf.set("hbase.coprocessor.master.classes", AccessController.class.getName()); - conf.set("hbase.coprocessor.region.classes", AccessController.class.getName()); + conf.set("hbase.coprocessor.region.classes", AccessController.class.getName()+ + ","+SecureBulkLoadEndpoint.class.getName()); // add the process running user to superusers String currentUser = User.getCurrent().getName(); conf.set("hbase.superuser", "admin,"+currentUser); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java index 1d3654f913a..9b38eb7666d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java @@ -30,12 +30,17 @@ import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.LargeTests; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.client.Append; @@ -53,6 +58,9 @@ import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment; +import org.apache.hadoop.hbase.io.hfile.CacheConfig; +import org.apache.hadoop.hbase.io.hfile.HFile; +import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles; import org.apache.hadoop.hbase.master.MasterCoprocessorHost; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.RequestConverter; @@ -199,23 +207,30 
@@ public class TestAccessController { try { user.runAs(action); fail("Expected AccessDeniedException for user '" + user.getShortName() + "'"); - } catch (RetriesExhaustedWithDetailsException e) { - // in case of batch operations, and put, the client assembles a - // RetriesExhaustedWithDetailsException instead of throwing an - // AccessDeniedException + } catch (IOException e) { boolean isAccessDeniedException = false; - for (Throwable ex : e.getCauses()) { - if (ex instanceof ServiceException) { - ServiceException se = (ServiceException)ex; - if (se.getCause() != null && se.getCause() instanceof AccessDeniedException) { + if(e instanceof RetriesExhaustedWithDetailsException) { + // in case of batch operations, and put, the client assembles a + // RetriesExhaustedWithDetailsException instead of throwing an + // AccessDeniedException + for(Throwable ex : ((RetriesExhaustedWithDetailsException) e).getCauses()) { + if (ex instanceof AccessDeniedException) { isAccessDeniedException = true; break; } - } else if (ex instanceof AccessDeniedException) { - isAccessDeniedException = true; - break; } } + else { + // For doBulkLoad calls AccessDeniedException + // is buried in the stack trace + Throwable ex = e; + do { + if (ex instanceof AccessDeniedException) { + isAccessDeniedException = true; + break; + } + } while((ex = ex.getCause()) != null); + } if (!isAccessDeniedException) { fail("Not receiving AccessDeniedException for user '" + user.getShortName() + "'"); } @@ -233,8 +248,6 @@ public class TestAccessController { } } fail("Not receiving AccessDeniedException for user '" + user.getShortName() + "'"); - } catch (AccessDeniedException ade) { - // expected result } } } @@ -674,6 +687,104 @@ public class TestAccessController { verifyReadWrite(checkAndPut); } + @Test + public void testBulkLoad() throws Exception { + FileSystem fs = TEST_UTIL.getTestFileSystem(); + final Path dir = TEST_UTIL.getDataTestDir("testBulkLoad"); + fs.mkdirs(dir); + //need to make it globally writable + //so users creating HFiles have write permissions + fs.setPermission(dir, FsPermission.valueOf("-rwxrwxrwx")); + + PrivilegedExceptionAction bulkLoadAction = new PrivilegedExceptionAction() { + public Object run() throws Exception { + int numRows = 3; + + //Making the assumption that the test table won't split between the range + byte[][][] hfileRanges = {{{(byte)0}, {(byte)9}}}; + + Path bulkLoadBasePath = new Path(dir, new Path(User.getCurrent().getName())); + new BulkLoadHelper(bulkLoadBasePath) + .bulkLoadHFile(TEST_TABLE, TEST_FAMILY, Bytes.toBytes("q"), hfileRanges, numRows); + + return null; + } + }; + verifyWrite(bulkLoadAction); + } + + public class BulkLoadHelper { + private final FileSystem fs; + private final Path loadPath; + private final Configuration conf; + + public BulkLoadHelper(Path loadPath) throws IOException { + fs = TEST_UTIL.getTestFileSystem(); + conf = TEST_UTIL.getConfiguration(); + loadPath = loadPath.makeQualified(fs); + this.loadPath = loadPath; + } + + private void createHFile(Path path, + byte[] family, byte[] qualifier, + byte[] startKey, byte[] endKey, int numRows) throws IOException { + + HFile.Writer writer = null; + long now = System.currentTimeMillis(); + try { + writer = HFile.getWriterFactory(conf, new CacheConfig(conf)) + .withPath(fs, path) + .withComparator(KeyValue.KEY_COMPARATOR) + .create(); + // subtract 2 since numRows doesn't include boundary keys + for (byte[] key : Bytes.iterateOnSplits(startKey, endKey, true, numRows-2)) { + KeyValue kv = new KeyValue(key, family, 
qualifier, now, key); + writer.append(kv); + } + } finally { + if(writer != null) + writer.close(); + } + } + + private void bulkLoadHFile( + byte[] tableName, + byte[] family, + byte[] qualifier, + byte[][][] hfileRanges, + int numRowsPerRange) throws Exception { + + Path familyDir = new Path(loadPath, Bytes.toString(family)); + fs.mkdirs(familyDir); + int hfileIdx = 0; + for (byte[][] range : hfileRanges) { + byte[] from = range[0]; + byte[] to = range[1]; + createHFile(new Path(familyDir, "hfile_"+(hfileIdx++)), + family, qualifier, from, to, numRowsPerRange); + } + //set global read so RegionServer can move it + setPermission(loadPath, FsPermission.valueOf("-rwxrwxrwx")); + + HTable table = new HTable(conf, tableName); + TEST_UTIL.waitTableAvailable(tableName, 30000); + LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf); + loader.doBulkLoad(loadPath, table); + } + + public void setPermission(Path dir, FsPermission perm) throws IOException { + if(!fs.getFileStatus(dir).isDir()) { + fs.setPermission(dir,perm); + } + else { + for(FileStatus el : fs.listStatus(dir)) { + fs.setPermission(el.getPath(), perm); + setPermission(el.getPath() , perm); + } + } + } + } + @Test public void testAppend() throws Exception {