HBASE-23330: Fix delegation token fetch with MasterRegistry (#1084)

Signed-off-by: Andrew Purtell <apurtell@apache.org>
Bharath Vissapragada 2020-01-24 18:04:21 -08:00
parent 1aa6a4efb9
commit d8b3f55518
64 changed files with 1693 additions and 81 deletions
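
In short: when selecting an existing delegation token, the client no longer reads the cluster ID from ZooKeeper; Connection#getClusterId, served by the connection registry (MasterRegistry included), supplies it instead. Below is a minimal caller-side sketch of that path, not part of the commit; the class name is illustrative and a reachable secure cluster is assumed.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier;
import org.apache.hadoop.hbase.security.token.AuthenticationTokenSelector;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token;

public final class TokenSelectionSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf)) {
      // Cluster ID now comes from the connection's registry (MasterRegistry included),
      // not from a client-side ZooKeeper read.
      String clusterId = conn.getClusterId();
      if (clusterId == null) {
        throw new java.io.IOException("Failed to get cluster ID");
      }
      User user = User.getCurrent();
      Token<AuthenticationTokenIdentifier> token =
          new AuthenticationTokenSelector().selectToken(new Text(clusterId), user.getTokens());
      System.out.println(token == null ? "no cached token for " + clusterId
                                       : "found token for service " + token.getService());
    }
  }
}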

View File

@ -33,6 +33,7 @@ import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
@ -278,6 +279,15 @@ class AsyncConnectionImpl implements AsyncConnection {
}, stub -> true, "master stub");
}
String getClusterId() {
try {
return registry.getClusterId().get();
} catch (InterruptedException | ExecutionException e) {
LOG.error("Error fetching cluster ID: ", e);
}
return null;
}
void clearMasterStubCache(MasterService.Interface stub) {
masterStub.compareAndSet(stub, null);
}

View File

@ -204,6 +204,11 @@ public interface Connection extends Abortable, Closeable {
*/
AsyncConnection toAsyncConnection();
/**
* @return the cluster ID unique to this HBase cluster.
*/
String getClusterId();
/**
* Retrieve an Hbck implementation to fix an HBase cluster.
* The returned Hbck is not guaranteed to be thread-safe. A new instance should be created by

View File

@ -204,6 +204,11 @@ class ConnectionOverAsyncConnection implements Connection {
return conn;
}
@Override
public String getClusterId() {
return conn.getClusterId();
}
@Override
public Hbck getHbck() throws IOException {
return FutureUtils.get(conn.getHbck());

View File

@ -246,5 +246,11 @@ public class TestMultiTableInputFormatBase {
public AsyncConnection toAsyncConnection() {
return null;
}
@Override
public String getClusterId() {
return null;
}
}
}

View File

@ -296,5 +296,10 @@ public class TestTableInputFormatBase {
public AsyncConnection toAsyncConnection() {
throw new UnsupportedOperationException();
}
@Override
public String getClusterId() {
return null;
}
}
}

View File

@ -105,4 +105,9 @@ public class SharedConnection implements Connection {
public AsyncConnection toAsyncConnection() {
return new SharedAsyncConnection(conn.toAsyncConnection());
}
@Override
public String getClusterId() {
return conn.getClusterId();
}
}

View File

@ -26,18 +26,14 @@ import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.security.token.Token;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Utility methods for obtaining authentication tokens.
*/
@ -219,7 +215,7 @@ public class TokenUtil {
public static void addTokenForJob(final Connection conn, final JobConf job, User user)
throws IOException, InterruptedException {
Token<AuthenticationTokenIdentifier> token = getAuthToken(conn.getConfiguration(), user);
Token<AuthenticationTokenIdentifier> token = getAuthToken(conn, user);
if (token == null) {
token = ClientTokenUtil.obtainToken(conn, user);
}
@ -238,7 +234,7 @@ public class TokenUtil {
*/
public static void addTokenForJob(final Connection conn, User user, Job job)
throws IOException, InterruptedException {
Token<AuthenticationTokenIdentifier> token = getAuthToken(conn.getConfiguration(), user);
Token<AuthenticationTokenIdentifier> token = getAuthToken(conn, user);
if (token == null) {
token = ClientTokenUtil.obtainToken(conn, user);
}
@ -257,7 +253,7 @@ public class TokenUtil {
*/
public static boolean addTokenIfMissing(Connection conn, User user)
throws IOException, InterruptedException {
Token<AuthenticationTokenIdentifier> token = getAuthToken(conn.getConfiguration(), user);
Token<AuthenticationTokenIdentifier> token = getAuthToken(conn, user);
if (token == null) {
token = ClientTokenUtil.obtainToken(conn, user);
user.getUGI().addToken(token.getService(), token);
@ -270,19 +266,12 @@ public class TokenUtil {
* Get the authentication token of the user for the cluster specified in the configuration
* @return null if the user does not have the token, otherwise the auth token for the cluster.
*/
private static Token<AuthenticationTokenIdentifier> getAuthToken(Configuration conf, User user)
throws IOException, InterruptedException {
ZKWatcher zkw = new ZKWatcher(conf, "TokenUtil-getAuthToken", null);
try {
String clusterId = ZKClusterId.readClusterIdZNode(zkw);
if (clusterId == null) {
throw new IOException("Failed to get cluster ID");
}
return new AuthenticationTokenSelector().selectToken(new Text(clusterId), user.getTokens());
} catch (KeeperException e) {
throw new IOException(e);
} finally {
zkw.close();
private static Token<AuthenticationTokenIdentifier> getAuthToken(Connection conn, User user)
throws IOException {
final String clusterId = conn.getClusterId();
if (clusterId == null) {
throw new IOException("Failed to get cluster ID");
}
return new AuthenticationTokenSelector().selectToken(new Text(clusterId), user.getTokens());
}
}
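
With getAuthToken now taking a Connection, the ZKWatcher construction, ZKClusterId read, and KeeperException translation above drop out of the job-submission path. A hedged caller-side sketch follows, not part of the commit; the wrapper class and method name are illustrative, and a configured MapReduce Job is assumed.

import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.token.TokenUtil;
import org.apache.hadoop.mapreduce.Job;

public final class JobTokenSketch {
  // Hedged usage sketch: attach the HBase auth token to a MapReduce job via the refactored path.
  public static void attachHBaseToken(Job job) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(job.getConfiguration())) {
      // Reuses an existing token when the cluster IDs match; otherwise obtains one over RPC.
      TokenUtil.addTokenForJob(conn, User.getCurrent(), job);
    }
  }
}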

View File

@ -202,6 +202,19 @@ public class ConnectionCache {
return false;
}
/**
* @return Cluster ID for the HBase cluster or null if there is an error making the connection.
*/
public String getClusterId() {
try {
ConnectionInfo connInfo = getCurrentConnection();
return connInfo.connection.getClusterId();
} catch (IOException e) {
LOG.error("Error getting connection: ", e);
}
return null;
}
class ConnectionInfo {
final Connection connection;
final String userName;

View File

@ -1272,6 +1272,11 @@ public class ThriftHBaseServiceHandler extends HBaseServiceHandler implements Hb
return TThriftServerType.ONE;
}
@Override
public String getClusterId() throws TException {
return connectionCache.getClusterId();
}
private static IOError getIOError(Throwable throwable) {
IOError error = new IOErrorWithCause(throwable);
error.setMessage(Throwables.getStackTraceAsString(throwable));

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift.generated;
* An AlreadyExists exceptions signals that a table with the specified
* name already exists
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class AlreadyExists extends org.apache.thrift.TException implements org.apache.thrift.TBase<AlreadyExists, AlreadyExists._Fields>, java.io.Serializable, Cloneable, Comparable<AlreadyExists> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AlreadyExists");

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
/**
* A BatchMutation object is used to apply a number of Mutations to a single row.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class BatchMutation implements org.apache.thrift.TBase<BatchMutation, BatchMutation._Fields>, java.io.Serializable, Cloneable, Comparable<BatchMutation> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BatchMutation");

View File

@ -12,7 +12,7 @@ package org.apache.hadoop.hbase.thrift.generated;
* such as the number of versions, compression settings, etc. It is
* used as input when creating a table or adding a column.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class ColumnDescriptor implements org.apache.thrift.TBase<ColumnDescriptor, ColumnDescriptor._Fields>, java.io.Serializable, Cloneable, Comparable<ColumnDescriptor> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ColumnDescriptor");

View File

@ -7,7 +7,7 @@
package org.apache.hadoop.hbase.thrift.generated;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class Hbase {
public interface Iface {
@ -622,6 +622,11 @@ public class Hbase {
*/
public TThriftServerType getThriftServerType() throws org.apache.thrift.TException;
/**
* Returns the cluster ID for this cluster.
*/
public java.lang.String getClusterId() throws org.apache.thrift.TException;
}
public interface AsyncIface {
@ -716,6 +721,8 @@ public class Hbase {
public void getThriftServerType(org.apache.thrift.async.AsyncMethodCallback<TThriftServerType> resultHandler) throws org.apache.thrift.TException;
public void getClusterId(org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException;
}
public static class Client extends org.apache.thrift.TServiceClient implements Iface {
@ -1976,6 +1983,28 @@ public class Hbase {
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getThriftServerType failed: unknown result");
}
public java.lang.String getClusterId() throws org.apache.thrift.TException
{
send_getClusterId();
return recv_getClusterId();
}
public void send_getClusterId() throws org.apache.thrift.TException
{
getClusterId_args args = new getClusterId_args();
sendBase("getClusterId", args);
}
public java.lang.String recv_getClusterId() throws org.apache.thrift.TException
{
getClusterId_result result = new getClusterId_result();
receiveBase(result, "getClusterId");
if (result.isSetSuccess()) {
return result.success;
}
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getClusterId failed: unknown result");
}
}
public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
@ -3701,6 +3730,35 @@ public class Hbase {
}
}
public void getClusterId(org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException {
checkReady();
getClusterId_call method_call = new getClusterId_call(resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class getClusterId_call extends org.apache.thrift.async.TAsyncMethodCall<java.lang.String> {
public getClusterId_call(org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getClusterId", org.apache.thrift.protocol.TMessageType.CALL, 0));
getClusterId_args args = new getClusterId_args();
args.write(prot);
prot.writeMessageEnd();
}
public java.lang.String getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new java.lang.IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_getClusterId();
}
}
}
public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {
@ -3759,6 +3817,7 @@ public class Hbase {
processMap.put("append", new append());
processMap.put("checkAndPut", new checkAndPut());
processMap.put("getThriftServerType", new getThriftServerType());
processMap.put("getClusterId", new getClusterId());
return processMap;
}
@ -5094,6 +5153,31 @@ public class Hbase {
}
}
public static class getClusterId<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getClusterId_args> {
public getClusterId() {
super("getClusterId");
}
public getClusterId_args getEmptyArgsInstance() {
return new getClusterId_args();
}
protected boolean isOneway() {
return false;
}
@Override
protected boolean rethrowUnhandledExceptions() {
return false;
}
public getClusterId_result getResult(I iface, getClusterId_args args) throws org.apache.thrift.TException {
getClusterId_result result = new getClusterId_result();
result.success = iface.getClusterId();
return result;
}
}
}
public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {
@ -5152,6 +5236,7 @@ public class Hbase {
processMap.put("append", new append());
processMap.put("checkAndPut", new checkAndPut());
processMap.put("getThriftServerType", new getThriftServerType());
processMap.put("getClusterId", new getClusterId());
return processMap;
}
@ -8112,6 +8197,67 @@ public class Hbase {
}
}
public static class getClusterId<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getClusterId_args, java.lang.String> {
public getClusterId() {
super("getClusterId");
}
public getClusterId_args getEmptyArgsInstance() {
return new getClusterId_args();
}
public org.apache.thrift.async.AsyncMethodCallback<java.lang.String> getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new org.apache.thrift.async.AsyncMethodCallback<java.lang.String>() {
public void onComplete(java.lang.String o) {
getClusterId_result result = new getClusterId_result();
result.success = o;
try {
fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
} catch (org.apache.thrift.transport.TTransportException e) {
_LOGGER.error("TTransportException writing to internal frame buffer", e);
fb.close();
} catch (java.lang.Exception e) {
_LOGGER.error("Exception writing to internal frame buffer", e);
onError(e);
}
}
public void onError(java.lang.Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TSerializable msg;
getClusterId_result result = new getClusterId_result();
if (e instanceof org.apache.thrift.transport.TTransportException) {
_LOGGER.error("TTransportException inside handler", e);
fb.close();
return;
} else if (e instanceof org.apache.thrift.TApplicationException) {
_LOGGER.error("TApplicationException inside handler", e);
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TApplicationException)e;
} else {
_LOGGER.error("Exception inside handler", e);
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
} catch (java.lang.Exception ex) {
_LOGGER.error("Exception writing to internal frame buffer", ex);
fb.close();
}
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, getClusterId_args args, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException {
iface.getClusterId(resultHandler);
}
}
}
public static class enableTable_args implements org.apache.thrift.TBase<enableTable_args, enableTable_args._Fields>, java.io.Serializable, Cloneable, Comparable<enableTable_args> {
@ -60584,4 +60730,625 @@ public class Hbase {
}
}
public static class getClusterId_args implements org.apache.thrift.TBase<getClusterId_args, getClusterId_args._Fields>, java.io.Serializable, Cloneable, Comparable<getClusterId_args> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getClusterId_args");
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new getClusterId_argsStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new getClusterId_argsTupleSchemeFactory();
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
;
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getClusterId_args.class, metaDataMap);
}
public getClusterId_args() {
}
/**
* Performs a deep copy on <i>other</i>.
*/
public getClusterId_args(getClusterId_args other) {
}
public getClusterId_args deepCopy() {
return new getClusterId_args(this);
}
@Override
public void clear() {
}
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
}
throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that == null)
return false;
if (that instanceof getClusterId_args)
return this.equals((getClusterId_args)that);
return false;
}
public boolean equals(getClusterId_args that) {
if (that == null)
return false;
if (this == that)
return true;
return true;
}
@Override
public int hashCode() {
int hashCode = 1;
return hashCode;
}
@Override
public int compareTo(getClusterId_args other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
return 0;
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("getClusterId_args(");
boolean first = true;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class getClusterId_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public getClusterId_argsStandardScheme getScheme() {
return new getClusterId_argsStandardScheme();
}
}
private static class getClusterId_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<getClusterId_args> {
public void read(org.apache.thrift.protocol.TProtocol iprot, getClusterId_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, getClusterId_args struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class getClusterId_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public getClusterId_argsTupleScheme getScheme() {
return new getClusterId_argsTupleScheme();
}
}
private static class getClusterId_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<getClusterId_args> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, getClusterId_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, getClusterId_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
}
}
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
public static class getClusterId_result implements org.apache.thrift.TBase<getClusterId_result, getClusterId_result._Fields>, java.io.Serializable, Cloneable, Comparable<getClusterId_result> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getClusterId_result");
private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRING, (short)0);
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new getClusterId_resultStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new getClusterId_resultTupleSchemeFactory();
public @org.apache.thrift.annotation.Nullable java.lang.String success; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
SUCCESS((short)0, "success");
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 0: // SUCCESS
return SUCCESS;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getClusterId_result.class, metaDataMap);
}
public getClusterId_result() {
}
public getClusterId_result(
java.lang.String success)
{
this();
this.success = success;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public getClusterId_result(getClusterId_result other) {
if (other.isSetSuccess()) {
this.success = other.success;
}
}
public getClusterId_result deepCopy() {
return new getClusterId_result(this);
}
@Override
public void clear() {
this.success = null;
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getSuccess() {
return this.success;
}
public getClusterId_result setSuccess(@org.apache.thrift.annotation.Nullable java.lang.String success) {
this.success = success;
return this;
}
public void unsetSuccess() {
this.success = null;
}
/** Returns true if field success is set (has been assigned a value) and false otherwise */
public boolean isSetSuccess() {
return this.success != null;
}
public void setSuccessIsSet(boolean value) {
if (!value) {
this.success = null;
}
}
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
case SUCCESS:
if (value == null) {
unsetSuccess();
} else {
setSuccess((java.lang.String)value);
}
break;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
case SUCCESS:
return getSuccess();
}
throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
case SUCCESS:
return isSetSuccess();
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that == null)
return false;
if (that instanceof getClusterId_result)
return this.equals((getClusterId_result)that);
return false;
}
public boolean equals(getClusterId_result that) {
if (that == null)
return false;
if (this == that)
return true;
boolean this_present_success = true && this.isSetSuccess();
boolean that_present_success = true && that.isSetSuccess();
if (this_present_success || that_present_success) {
if (!(this_present_success && that_present_success))
return false;
if (!this.success.equals(that.success))
return false;
}
return true;
}
@Override
public int hashCode() {
int hashCode = 1;
hashCode = hashCode * 8191 + ((isSetSuccess()) ? 131071 : 524287);
if (isSetSuccess())
hashCode = hashCode * 8191 + success.hashCode();
return hashCode;
}
@Override
public int compareTo(getClusterId_result other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = java.lang.Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetSuccess()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("getClusterId_result(");
boolean first = true;
sb.append("success:");
if (this.success == null) {
sb.append("null");
} else {
sb.append(this.success);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class getClusterId_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public getClusterId_resultStandardScheme getScheme() {
return new getClusterId_resultStandardScheme();
}
}
private static class getClusterId_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<getClusterId_result> {
public void read(org.apache.thrift.protocol.TProtocol iprot, getClusterId_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 0: // SUCCESS
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.success = iprot.readString();
struct.setSuccessIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, getClusterId_result struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.success != null) {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeString(struct.success);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class getClusterId_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public getClusterId_resultTupleScheme getScheme() {
return new getClusterId_resultTupleScheme();
}
}
private static class getClusterId_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<getClusterId_result> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, getClusterId_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
java.util.BitSet optionals = new java.util.BitSet();
if (struct.isSetSuccess()) {
optionals.set(0);
}
oprot.writeBitSet(optionals, 1);
if (struct.isSetSuccess()) {
oprot.writeString(struct.success);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, getClusterId_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
java.util.BitSet incoming = iprot.readBitSet(1);
if (incoming.get(0)) {
struct.success = iprot.readString();
struct.setSuccessIsSet(true);
}
}
}
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
}

View File

@ -12,7 +12,7 @@ package org.apache.hadoop.hbase.thrift.generated;
* to the Hbase master or an Hbase region server. Also used to return
* more general Hbase error conditions.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class IOError extends org.apache.thrift.TException implements org.apache.thrift.TBase<IOError, IOError._Fields>, java.io.Serializable, Cloneable, Comparable<IOError> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("IOError");

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift.generated;
* An IllegalArgument exception indicates an illegal or invalid
* argument was passed into a procedure.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class IllegalArgument extends org.apache.thrift.TException implements org.apache.thrift.TBase<IllegalArgument, IllegalArgument._Fields>, java.io.Serializable, Cloneable, Comparable<IllegalArgument> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("IllegalArgument");

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
/**
* A Mutation object is used to either update or delete a column-value.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class Mutation implements org.apache.thrift.TBase<Mutation, Mutation._Fields>, java.io.Serializable, Cloneable, Comparable<Mutation> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Mutation");

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
/**
* An Append object is used to specify the parameters for performing the append operation.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TAppend implements org.apache.thrift.TBase<TAppend, TAppend._Fields>, java.io.Serializable, Cloneable, Comparable<TAppend> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TAppend");

View File

@ -13,7 +13,7 @@ package org.apache.hadoop.hbase.thrift.generated;
* the timestamp of a cell to a first-class value, making it easy to take
* note of temporal data. Cell is used all the way from HStore up to HTable.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TCell implements org.apache.thrift.TBase<TCell, TCell._Fields>, java.io.Serializable, Cloneable, Comparable<TCell> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCell");

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
/**
* Holds column name and the cell.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TColumn implements org.apache.thrift.TBase<TColumn, TColumn._Fields>, java.io.Serializable, Cloneable, Comparable<TColumn> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumn");

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift.generated;
* For increments that are not incrementColumnValue
* equivalents.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TIncrement implements org.apache.thrift.TBase<TIncrement, TIncrement._Fields>, java.io.Serializable, Cloneable, Comparable<TIncrement> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIncrement");

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
/**
* A TRegionInfo contains information about an HTable region.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TRegionInfo implements org.apache.thrift.TBase<TRegionInfo, TRegionInfo._Fields>, java.io.Serializable, Cloneable, Comparable<TRegionInfo> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRegionInfo");

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
/**
* Holds row name and then a map of columns to cells.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TRowResult implements org.apache.thrift.TBase<TRowResult, TRowResult._Fields>, java.io.Serializable, Cloneable, Comparable<TRowResult> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRowResult");

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
/**
* A Scan object is used to specify scanner parameters when opening a scanner.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TScan implements org.apache.thrift.TBase<TScan, TScan._Fields>, java.io.Serializable, Cloneable, Comparable<TScan> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TScan");

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
/**
* Specify type of thrift server: thrift and thrift2
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public enum TThriftServerType implements org.apache.thrift.TEnum {
ONE(1),
TWO(2);

View File

@ -817,6 +817,11 @@ public class ThriftHBaseServiceHandler extends HBaseServiceHandler implements TH
return TThriftServerType.TWO;
}
@Override
public String getClusterId() throws TException {
return connectionCache.getClusterId();
}
@Override
public List<TNamespaceDescriptor> listNamespaceDescriptors() throws TIOError, TException {
try {

View File

@ -54,6 +54,7 @@ import org.apache.http.client.utils.HttpClientUtils;
import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.protocol.HttpContext;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TCompactProtocol;
import org.apache.thrift.protocol.TProtocol;
@ -63,11 +64,14 @@ import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
@InterfaceAudience.Private
public class ThriftConnection implements Connection {
private static final Logger LOG = LoggerFactory.getLogger(ThriftConnection.class);
private Configuration conf;
private User user;
// For HTTP protocol
@ -80,7 +84,8 @@ public class ThriftConnection implements Connection {
private boolean isFramed = false;
private boolean isCompact = false;
private ThriftClientBuilder clientBuilder;
// TODO: We can rip out the ThriftClient piece of it rather than creating a new client every time.
ThriftClientBuilder clientBuilder;
private int operationTimeout;
private int connectTimeout;
@ -145,10 +150,6 @@ public class ThriftConnection implements Connection {
return connectTimeout;
}
public ThriftClientBuilder getClientBuilder() {
return clientBuilder;
}
/**
* the default thrift client builder.
* One can extend the ThriftClientBuilder to builder custom client, implement
@ -334,7 +335,6 @@ public class ThriftConnection implements Connection {
} catch (IOException ioE) {
throw new RuntimeException(ioE);
}
}
};
}
@ -373,4 +373,15 @@ public class ThriftConnection implements Connection {
public AsyncConnection toAsyncConnection() {
throw new NotImplementedException("toAsyncConnection not supported in ThriftTable");
}
@Override
public String getClusterId() {
try {
Pair<THBaseService.Client, TTransport> client = clientBuilder.getClient();
return client.getFirst().getClusterId();
} catch (TException | IOException e) {
LOG.error("Error fetching cluster ID: ", e);
}
return null;
}
}
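
ThriftConnection#getClusterId above borrows a THBaseService client from the builder and issues the new getClusterId RPC, logging and returning null on failure. The sketch below exercises the same RPC with a raw thrift2 client; it is not part of the commit, and the host, port, transport, and protocol are placeholders that must match the running thrift2 server (for example framed transport or compact protocol if so configured).

import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public final class ThriftClusterIdSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder endpoint; exception handling kept minimal for the sketch.
    TTransport transport = new TSocket("thrift2.example.com", 9090);
    transport.open();
    THBaseService.Client client = new THBaseService.Client(new TBinaryProtocol(transport));
    System.out.println("cluster id: " + client.getClusterId());  // RPC added by this commit
    transport.close();
  }
}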

View File

@ -7,7 +7,7 @@
package org.apache.hadoop.hbase.thrift2.generated;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TAppend implements org.apache.thrift.TBase<TAppend, TAppend._Fields>, java.io.Serializable, Cloneable, Comparable<TAppend> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TAppend");

View File

@ -7,7 +7,7 @@
package org.apache.hadoop.hbase.thrift2.generated;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TAuthorization implements org.apache.thrift.TBase<TAuthorization, TAuthorization._Fields>, java.io.Serializable, Cloneable, Comparable<TAuthorization> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TAuthorization");

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* Thrift wrapper around
* org.apache.hadoop.hbase.regionserver.BloomType
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public enum TBloomFilterType implements org.apache.thrift.TEnum {
/**
* Bloomfilters disabled

View File

@ -7,7 +7,7 @@
package org.apache.hadoop.hbase.thrift2.generated;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TCellVisibility implements org.apache.thrift.TBase<TCellVisibility, TCellVisibility._Fields>, java.io.Serializable, Cloneable, Comparable<TCellVisibility> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCellVisibility");

View File

@ -12,7 +12,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* in a HBase table by column family and optionally
* a column qualifier and timestamp
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TColumn implements org.apache.thrift.TBase<TColumn, TColumn._Fields>, java.io.Serializable, Cloneable, Comparable<TColumn> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumn");

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* Thrift wrapper around
* org.apache.hadoop.hbase.client.ColumnFamilyDescriptor
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TColumnFamilyDescriptor implements org.apache.thrift.TBase<TColumnFamilyDescriptor, TColumnFamilyDescriptor._Fields>, java.io.Serializable, Cloneable, Comparable<TColumnFamilyDescriptor> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnFamilyDescriptor");

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
/**
* Represents a single cell and the amount to increment it by
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TColumnIncrement implements org.apache.thrift.TBase<TColumnIncrement, TColumnIncrement._Fields>, java.io.Serializable, Cloneable, Comparable<TColumnIncrement> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnIncrement");

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
/**
* Represents a single cell and its value.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TColumnValue implements org.apache.thrift.TBase<TColumnValue, TColumnValue._Fields>, java.io.Serializable, Cloneable, Comparable<TColumnValue> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnValue");

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* Thrift wrapper around
* org.apache.hadoop.hbase.CompareOperator.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public enum TCompareOperator implements org.apache.thrift.TEnum {
LESS(0),
LESS_OR_EQUAL(1),

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* Thrift wrapper around
* org.apache.hadoop.hbase.io.compress.Algorithm
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public enum TCompressionAlgorithm implements org.apache.thrift.TEnum {
LZO(0),
GZ(1),

View File

@ -12,7 +12,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* - STRONG means reads only from primary region
* - TIMELINE means reads might return values from secondary region replicas
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public enum TConsistency implements org.apache.thrift.TEnum {
STRONG(1),
TIMELINE(2);

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* Thrift wrapper around
* org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public enum TDataBlockEncoding implements org.apache.thrift.TEnum {
/**
* Disable data block encoding.

View File

@ -33,7 +33,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* by changing the durability. If you don't provide durability, it defaults to
* column family's default setting for durability.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TDelete implements org.apache.thrift.TBase<TDelete, TDelete._Fields>, java.io.Serializable, Cloneable, Comparable<TDelete> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TDelete");

View File

@ -12,7 +12,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* - DELETE_COLUMN means exactly one version will be removed,
* - DELETE_COLUMNS means previous versions will also be removed.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public enum TDeleteType implements org.apache.thrift.TEnum {
DELETE_COLUMN(0),
DELETE_COLUMNS(1),

View File

@ -14,7 +14,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* - SYNC_WAL means write the Mutation to the WAL synchronously,
* - FSYNC_WAL means Write the Mutation to the WAL synchronously and force the entries to disk.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public enum TDurability implements org.apache.thrift.TEnum {
USE_DEFAULT(0),
SKIP_WAL(1),

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* If you specify a time range and a timestamp the range is ignored.
* Timestamps on TColumns are ignored.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TGet implements org.apache.thrift.TBase<TGet, TGet._Fields>, java.io.Serializable, Cloneable, Comparable<TGet> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGet");

View File

@ -7,7 +7,7 @@
package org.apache.hadoop.hbase.thrift2.generated;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class THBaseService {
public interface Iface {
@ -507,6 +507,11 @@ public class THBaseService {
*/
public TThriftServerType getThriftServerType() throws org.apache.thrift.TException;
/**
* Returns the cluster ID for this cluster.
*/
public java.lang.String getClusterId() throws org.apache.thrift.TException;
}
public interface AsyncIface {
@ -605,6 +610,8 @@ public class THBaseService {
public void getThriftServerType(org.apache.thrift.async.AsyncMethodCallback<TThriftServerType> resultHandler) throws org.apache.thrift.TException;
public void getClusterId(org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException;
}
public static class Client extends org.apache.thrift.TServiceClient implements Iface {
@ -1839,6 +1846,28 @@ public class THBaseService {
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getThriftServerType failed: unknown result");
}
public java.lang.String getClusterId() throws org.apache.thrift.TException
{
send_getClusterId();
return recv_getClusterId();
}
public void send_getClusterId() throws org.apache.thrift.TException
{
getClusterId_args args = new getClusterId_args();
sendBase("getClusterId", args);
}
public java.lang.String recv_getClusterId() throws org.apache.thrift.TException
{
getClusterId_result result = new getClusterId_result();
receiveBase(result, "getClusterId");
if (result.isSetSuccess()) {
return result.success;
}
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getClusterId failed: unknown result");
}
}
public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
@ -3475,6 +3504,35 @@ public class THBaseService {
}
}
public void getClusterId(org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException {
checkReady();
getClusterId_call method_call = new getClusterId_call(resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class getClusterId_call extends org.apache.thrift.async.TAsyncMethodCall<java.lang.String> {
public getClusterId_call(org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getClusterId", org.apache.thrift.protocol.TMessageType.CALL, 0));
getClusterId_args args = new getClusterId_args();
args.write(prot);
prot.writeMessageEnd();
}
public java.lang.String getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new java.lang.IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_getClusterId();
}
}
}
public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {
@ -3535,6 +3593,7 @@ public class THBaseService {
processMap.put("listNamespaceDescriptors", new listNamespaceDescriptors());
processMap.put("listNamespaces", new listNamespaces());
processMap.put("getThriftServerType", new getThriftServerType());
processMap.put("getClusterId", new getClusterId());
return processMap;
}
@ -4911,6 +4970,31 @@ public class THBaseService {
}
}
public static class getClusterId<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getClusterId_args> {
public getClusterId() {
super("getClusterId");
}
public getClusterId_args getEmptyArgsInstance() {
return new getClusterId_args();
}
protected boolean isOneway() {
return false;
}
@Override
protected boolean rethrowUnhandledExceptions() {
return false;
}
public getClusterId_result getResult(I iface, getClusterId_args args) throws org.apache.thrift.TException {
getClusterId_result result = new getClusterId_result();
result.success = iface.getClusterId();
return result;
}
}
}
public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {
@ -4971,6 +5055,7 @@ public class THBaseService {
processMap.put("listNamespaceDescriptors", new listNamespaceDescriptors());
processMap.put("listNamespaces", new listNamespaces());
processMap.put("getThriftServerType", new getThriftServerType());
processMap.put("getClusterId", new getClusterId());
return processMap;
}
@ -8026,6 +8111,67 @@ public class THBaseService {
}
}
public static class getClusterId<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getClusterId_args, java.lang.String> {
public getClusterId() {
super("getClusterId");
}
public getClusterId_args getEmptyArgsInstance() {
return new getClusterId_args();
}
public org.apache.thrift.async.AsyncMethodCallback<java.lang.String> getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new org.apache.thrift.async.AsyncMethodCallback<java.lang.String>() {
public void onComplete(java.lang.String o) {
getClusterId_result result = new getClusterId_result();
result.success = o;
try {
fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
} catch (org.apache.thrift.transport.TTransportException e) {
_LOGGER.error("TTransportException writing to internal frame buffer", e);
fb.close();
} catch (java.lang.Exception e) {
_LOGGER.error("Exception writing to internal frame buffer", e);
onError(e);
}
}
public void onError(java.lang.Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TSerializable msg;
getClusterId_result result = new getClusterId_result();
if (e instanceof org.apache.thrift.transport.TTransportException) {
_LOGGER.error("TTransportException inside handler", e);
fb.close();
return;
} else if (e instanceof org.apache.thrift.TApplicationException) {
_LOGGER.error("TApplicationException inside handler", e);
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TApplicationException)e;
} else {
_LOGGER.error("Exception inside handler", e);
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
} catch (java.lang.Exception ex) {
_LOGGER.error("Exception writing to internal frame buffer", ex);
fb.close();
}
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, getClusterId_args args, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException {
iface.getClusterId(resultHandler);
}
}
}
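On the server side the generated Processor and AsyncProcessor dispatch "getClusterId" to whatever object implements Iface or AsyncIface, so the RPC ultimately needs a handler method behind it. The actual HBase Thrift2 handler is not part of this hunk; the class below is only a sketch of the delegation one would expect, assuming a handler backed by an HBase client Connection like the one the new test exercises.

import org.apache.hadoop.hbase.client.Connection;

public class ClusterIdHandlerSketch {
  private final Connection connection;

  public ClusterIdHandlerSketch(Connection connection) {
    this.connection = connection;
  }

  // Same signature as THBaseService.Iface#getClusterId(); a real handler
  // implements the full interface, whereas this sketch shows just the new method.
  public String getClusterId() {
    return connection.getClusterId();
  }

  // A complete Iface implementation could then be exposed, for example, as:
  //   new org.apache.thrift.server.TThreadPoolServer(
  //       new org.apache.thrift.server.TThreadPoolServer.Args(
  //           new org.apache.thrift.transport.TServerSocket(9090))
  //         .processor(new THBaseService.Processor<>(fullHandler)));
}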
public static class exists_args implements org.apache.thrift.TBase<exists_args, exists_args._Fields>, java.io.Serializable, Cloneable, Comparable<exists_args> {
@ -52112,4 +52258,625 @@ public class THBaseService {
}
}
public static class getClusterId_args implements org.apache.thrift.TBase<getClusterId_args, getClusterId_args._Fields>, java.io.Serializable, Cloneable, Comparable<getClusterId_args> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getClusterId_args");
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new getClusterId_argsStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new getClusterId_argsTupleSchemeFactory();
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
;
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if it's not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if it's not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getClusterId_args.class, metaDataMap);
}
public getClusterId_args() {
}
/**
* Performs a deep copy on <i>other</i>.
*/
public getClusterId_args(getClusterId_args other) {
}
public getClusterId_args deepCopy() {
return new getClusterId_args(this);
}
@Override
public void clear() {
}
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
}
throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that == null)
return false;
if (that instanceof getClusterId_args)
return this.equals((getClusterId_args)that);
return false;
}
public boolean equals(getClusterId_args that) {
if (that == null)
return false;
if (this == that)
return true;
return true;
}
@Override
public int hashCode() {
int hashCode = 1;
return hashCode;
}
@Override
public int compareTo(getClusterId_args other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
return 0;
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("getClusterId_args(");
boolean first = true;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class getClusterId_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public getClusterId_argsStandardScheme getScheme() {
return new getClusterId_argsStandardScheme();
}
}
private static class getClusterId_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<getClusterId_args> {
public void read(org.apache.thrift.protocol.TProtocol iprot, getClusterId_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, getClusterId_args struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class getClusterId_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public getClusterId_argsTupleScheme getScheme() {
return new getClusterId_argsTupleScheme();
}
}
private static class getClusterId_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<getClusterId_args> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, getClusterId_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, getClusterId_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
}
}
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
public static class getClusterId_result implements org.apache.thrift.TBase<getClusterId_result, getClusterId_result._Fields>, java.io.Serializable, Cloneable, Comparable<getClusterId_result> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getClusterId_result");
private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRING, (short)0);
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new getClusterId_resultStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new getClusterId_resultTupleSchemeFactory();
public @org.apache.thrift.annotation.Nullable java.lang.String success; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
SUCCESS((short)0, "success");
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if it's not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 0: // SUCCESS
return SUCCESS;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if it's not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getClusterId_result.class, metaDataMap);
}
public getClusterId_result() {
}
public getClusterId_result(
java.lang.String success)
{
this();
this.success = success;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public getClusterId_result(getClusterId_result other) {
if (other.isSetSuccess()) {
this.success = other.success;
}
}
public getClusterId_result deepCopy() {
return new getClusterId_result(this);
}
@Override
public void clear() {
this.success = null;
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getSuccess() {
return this.success;
}
public getClusterId_result setSuccess(@org.apache.thrift.annotation.Nullable java.lang.String success) {
this.success = success;
return this;
}
public void unsetSuccess() {
this.success = null;
}
/** Returns true if field success is set (has been assigned a value) and false otherwise */
public boolean isSetSuccess() {
return this.success != null;
}
public void setSuccessIsSet(boolean value) {
if (!value) {
this.success = null;
}
}
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
case SUCCESS:
if (value == null) {
unsetSuccess();
} else {
setSuccess((java.lang.String)value);
}
break;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
case SUCCESS:
return getSuccess();
}
throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
case SUCCESS:
return isSetSuccess();
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that == null)
return false;
if (that instanceof getClusterId_result)
return this.equals((getClusterId_result)that);
return false;
}
public boolean equals(getClusterId_result that) {
if (that == null)
return false;
if (this == that)
return true;
boolean this_present_success = true && this.isSetSuccess();
boolean that_present_success = true && that.isSetSuccess();
if (this_present_success || that_present_success) {
if (!(this_present_success && that_present_success))
return false;
if (!this.success.equals(that.success))
return false;
}
return true;
}
@Override
public int hashCode() {
int hashCode = 1;
hashCode = hashCode * 8191 + ((isSetSuccess()) ? 131071 : 524287);
if (isSetSuccess())
hashCode = hashCode * 8191 + success.hashCode();
return hashCode;
}
@Override
public int compareTo(getClusterId_result other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = java.lang.Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetSuccess()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("getClusterId_result(");
boolean first = true;
sb.append("success:");
if (this.success == null) {
sb.append("null");
} else {
sb.append(this.success);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class getClusterId_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public getClusterId_resultStandardScheme getScheme() {
return new getClusterId_resultStandardScheme();
}
}
private static class getClusterId_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<getClusterId_result> {
public void read(org.apache.thrift.protocol.TProtocol iprot, getClusterId_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 0: // SUCCESS
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.success = iprot.readString();
struct.setSuccessIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, getClusterId_result struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.success != null) {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeString(struct.success);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class getClusterId_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public getClusterId_resultTupleScheme getScheme() {
return new getClusterId_resultTupleScheme();
}
}
private static class getClusterId_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<getClusterId_result> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, getClusterId_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
java.util.BitSet optionals = new java.util.BitSet();
if (struct.isSetSuccess()) {
optionals.set(0);
}
oprot.writeBitSet(optionals, 1);
if (struct.isSetSuccess()) {
oprot.writeString(struct.success);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, getClusterId_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
java.util.BitSet incoming = iprot.readBitSet(1);
if (incoming.get(0)) {
struct.success = iprot.readString();
struct.setSuccessIsSet(true);
}
}
}
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
}

View File

@ -7,7 +7,7 @@
package org.apache.hadoop.hbase.thrift2.generated;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class THRegionInfo implements org.apache.thrift.TBase<THRegionInfo, THRegionInfo._Fields>, java.io.Serializable, Cloneable, Comparable<THRegionInfo> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("THRegionInfo");

View File

@ -7,7 +7,7 @@
package org.apache.hadoop.hbase.thrift2.generated;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class THRegionLocation implements org.apache.thrift.TBase<THRegionLocation, THRegionLocation._Fields>, java.io.Serializable, Cloneable, Comparable<THRegionLocation> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("THRegionLocation");

View File

@ -12,7 +12,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* to the HBase master or a HBase region server. Also used to return
* more general HBase error conditions.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TIOError extends org.apache.thrift.TException implements org.apache.thrift.TBase<TIOError, TIOError._Fields>, java.io.Serializable, Cloneable, Comparable<TIOError> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIOError");

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* A TIllegalArgument exception indicates an illegal or invalid
* argument was passed into a procedure.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TIllegalArgument extends org.apache.thrift.TException implements org.apache.thrift.TBase<TIllegalArgument, TIllegalArgument._Fields>, java.io.Serializable, Cloneable, Comparable<TIllegalArgument> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIllegalArgument");

View File

@ -14,7 +14,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* by changing the durability. If you don't provide durability, it defaults to
* column family's default setting for durability.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TIncrement implements org.apache.thrift.TBase<TIncrement, TIncrement._Fields>, java.io.Serializable, Cloneable, Comparable<TIncrement> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIncrement");

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* Thrift wrapper around
* org.apache.hadoop.hbase.KeepDeletedCells
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public enum TKeepDeletedCells implements org.apache.thrift.TEnum {
/**
* Deleted Cells are not retained.

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
/**
* Atomic mutation for the specified row. It can be either Put or Delete.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TMutation extends org.apache.thrift.TUnion<TMutation, TMutation._Fields> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TMutation");
private static final org.apache.thrift.protocol.TField PUT_FIELD_DESC = new org.apache.thrift.protocol.TField("put", org.apache.thrift.protocol.TType.STRUCT, (short)1);

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* Thrift wrapper around
* org.apache.hadoop.hbase.NamespaceDescriptor
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TNamespaceDescriptor implements org.apache.thrift.TBase<TNamespaceDescriptor, TNamespaceDescriptor._Fields>, java.io.Serializable, Cloneable, Comparable<TNamespaceDescriptor> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TNamespaceDescriptor");

View File

@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* by changing the durability. If you don't provide durability, it defaults to
* column family's default setting for durability.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TPut implements org.apache.thrift.TBase<TPut, TPut._Fields>, java.io.Serializable, Cloneable, Comparable<TPut> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TPut");

View File

@ -7,7 +7,7 @@
package org.apache.hadoop.hbase.thrift2.generated;
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public enum TReadType implements org.apache.thrift.TEnum {
DEFAULT(1),
STREAM(2),

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
/**
* if no Result is found, row and columnValues will not be set.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TResult implements org.apache.thrift.TBase<TResult, TResult._Fields>, java.io.Serializable, Cloneable, Comparable<TResult> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TResult");

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
/**
* A TRowMutations object is used to apply a number of Mutations to a single row.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TRowMutations implements org.apache.thrift.TBase<TRowMutations, TRowMutations._Fields>, java.io.Serializable, Cloneable, Comparable<TRowMutations> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRowMutations");

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* Any timestamps in the columns are ignored but the colFamTimeRangeMap included, use timeRange to select by timestamp.
* Max versions defaults to 1.
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TScan implements org.apache.thrift.TBase<TScan, TScan._Fields>, java.io.Serializable, Cloneable, Comparable<TScan> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TScan");

View File

@ -7,7 +7,7 @@
package org.apache.hadoop.hbase.thrift2.generated;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TServerName implements org.apache.thrift.TBase<TServerName, TServerName._Fields>, java.io.Serializable, Cloneable, Comparable<TServerName> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TServerName");

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* Thrift wrapper around
* org.apache.hadoop.hbase.client.TableDescriptor
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TTableDescriptor implements org.apache.thrift.TBase<TTableDescriptor, TTableDescriptor._Fields>, java.io.Serializable, Cloneable, Comparable<TTableDescriptor> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTableDescriptor");

View File

@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* Thrift wrapper around
* org.apache.hadoop.hbase.TableName
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TTableName implements org.apache.thrift.TBase<TTableName, TTableName._Fields>, java.io.Serializable, Cloneable, Comparable<TTableName> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTableName");

View File

@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
/**
* Specify type of thrift server: thrift and thrift2
*/
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public enum TThriftServerType implements org.apache.thrift.TEnum {
ONE(1),
TWO(2);

View File

@ -7,7 +7,7 @@
package org.apache.hadoop.hbase.thrift2.generated;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-11-07")
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-01-22")
public class TTimeRange implements org.apache.thrift.TBase<TTimeRange, TTimeRange._Fields>, java.io.Serializable, Cloneable, Comparable<TTimeRange> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTimeRange");

View File

@ -972,4 +972,9 @@ service Hbase {
* @return the type of this thrift server
*/
TThriftServerType getThriftServerType()
/**
* Returns the cluster ID for this cluster.
*/
string getClusterId()
}
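The thrift1 Hbase service gains the same endpoint, so the older generated client can fetch the cluster ID as well. A hedged sketch follows, assuming the org.apache.hadoop.hbase.thrift.generated package, an unframed binary protocol, and port 9090; all of these depend on the actual server configuration.

import org.apache.hadoop.hbase.thrift.generated.Hbase;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;

public class Thrift1ClusterIdSketch {
  public static void main(String[] args) throws Exception {
    TSocket socket = new TSocket("localhost", 9090); // assumed endpoint
    socket.open();
    try {
      Hbase.Client client = new Hbase.Client(new TBinaryProtocol(socket));
      System.out.println("cluster id: " + client.getClusterId());
    } finally {
      socket.close();
    }
  }
}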

View File

@ -1053,4 +1053,9 @@ service THBaseService {
* @return the type of this thrift server
*/
TThriftServerType getThriftServerType()
/**
* Returns the cluster ID for this cluster.
*/
string getClusterId()
}

View File

@ -196,10 +196,19 @@ public class TestThriftConnection {
}
@Test
public void testThrfitAdmin() throws Exception {
testThriftAdmin(thriftConnection, "testThrfitAdminNamesapce", "testThrfitAdminTable");
testThriftAdmin(thriftHttpConnection, "testThrfitHttpAdminNamesapce",
"testThrfitHttpAdminTable");
public void testGetClusterId() {
String actualClusterId = TEST_UTIL.getMiniHBaseCluster().getMaster().getClusterId();
for (Connection conn: new Connection[] {thriftConnection, thriftHttpConnection}) {
String thriftClusterId = conn.getClusterId();
assertEquals(actualClusterId, thriftClusterId);
}
}
@Test
public void testThriftAdmin() throws Exception {
testThriftAdmin(thriftConnection, "testThriftAdminNamespace", "testThriftAdminTable");
testThriftAdmin(thriftHttpConnection, "testThriftHttpAdminNamespace",
"testThriftHttpAdminTable");
}
@Test
@ -209,7 +218,7 @@ public class TestThriftConnection {
}
public void testGet(Connection connection, String tableName) throws IOException {
private void testGet(Connection connection, String tableName) throws IOException {
createTable(thriftAdmin, tableName);
try (Table table = connection.getTable(TableName.valueOf(tableName))){
Get get = new Get(ROW_1);