From f053003ce7e8d9c86b2ff762b646d69e5e04cfe2 Mon Sep 17 00:00:00 2001 From: Allan Yang Date: Wed, 9 Jan 2019 15:38:23 +0800 Subject: [PATCH] HBASE-21661 Provide Thrift2 implementation of Table/Admin --- .../apache/hadoop/hbase/thrift/Constants.java | 8 + .../hadoop/hbase/thrift/ThriftServer.java | 11 + .../hbase/thrift/generated/AlreadyExists.java | 2 +- .../hbase/thrift/generated/BatchMutation.java | 2 +- .../thrift/generated/ColumnDescriptor.java | 2 +- .../hadoop/hbase/thrift/generated/Hbase.java | 2 +- .../hbase/thrift/generated/IOError.java | 2 +- .../thrift/generated/IllegalArgument.java | 2 +- .../hbase/thrift/generated/Mutation.java | 2 +- .../hbase/thrift/generated/TAppend.java | 2 +- .../hadoop/hbase/thrift/generated/TCell.java | 2 +- .../hbase/thrift/generated/TColumn.java | 2 +- .../hbase/thrift/generated/TIncrement.java | 2 +- .../hbase/thrift/generated/TRegionInfo.java | 2 +- .../hbase/thrift/generated/TRowResult.java | 2 +- .../hadoop/hbase/thrift/generated/TScan.java | 2 +- .../thrift2/ThriftHBaseServiceHandler.java | 4 +- .../hadoop/hbase/thrift2/ThriftUtilities.java | 437 +++++ .../hbase/thrift2/client/ThriftAdmin.java | 1405 +++++++++++++++++ .../thrift2/client/ThriftClientBuilder.java | 37 + .../thrift2/client/ThriftConnection.java | 322 ++++ .../hbase/thrift2/client/ThriftTable.java | 492 ++++++ .../hbase/thrift2/generated/TAppend.java | 2 +- .../thrift2/generated/TAuthorization.java | 2 +- .../thrift2/generated/TCellVisibility.java | 2 +- .../hbase/thrift2/generated/TColumn.java | 2 +- .../generated/TColumnFamilyDescriptor.java | 2 +- .../thrift2/generated/TColumnIncrement.java | 2 +- .../hbase/thrift2/generated/TColumnValue.java | 2 +- .../hbase/thrift2/generated/TDelete.java | 2 +- .../hadoop/hbase/thrift2/generated/TGet.java | 127 +- .../thrift2/generated/THBaseService.java | 122 +- .../hbase/thrift2/generated/THRegionInfo.java | 2 +- .../thrift2/generated/THRegionLocation.java | 2 +- .../hbase/thrift2/generated/TIOError.java | 2 +- 
.../thrift2/generated/TIllegalArgument.java | 2 +- .../hbase/thrift2/generated/TIncrement.java | 2 +- .../generated/TNamespaceDescriptor.java | 2 +- .../hadoop/hbase/thrift2/generated/TPut.java | 2 +- .../hbase/thrift2/generated/TResult.java | 2 +- .../thrift2/generated/TRowMutations.java | 2 +- .../hadoop/hbase/thrift2/generated/TScan.java | 127 +- .../hbase/thrift2/generated/TServerName.java | 2 +- .../thrift2/generated/TTableDescriptor.java | 2 +- .../hbase/thrift2/generated/TTableName.java | 50 +- .../hbase/thrift2/generated/TTimeRange.java | 2 +- .../apache/hadoop/hbase/thrift2/hbase.thrift | 13 +- .../hbase/thrift2/TestThriftConnection.java | 841 ++++++++++ 48 files changed, 3946 insertions(+), 118 deletions(-) create mode 100644 hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftAdmin.java create mode 100644 hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftClientBuilder.java create mode 100644 hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftConnection.java create mode 100644 hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftTable.java create mode 100644 hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftConnection.java diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java index 8e3d0048a3d..55f2499a692 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/Constants.java @@ -144,8 +144,16 @@ public final class Constants { public static final String THRIFT_READONLY_ENABLED = "hbase.thrift.readonly"; public static final boolean THRIFT_READONLY_ENABLED_DEFAULT = false; + public static final String HBASE_THRIFT_CLIENT_SCANNER_CACHING = + "hbase.thrift.client.scanner.caching"; + public static final int HBASE_THRIFT_CLIENT_SCANNER_CACHING_DEFAULT = 20; + 
public static final String HBASE_THRIFT_SERVER_NAME = "hbase.thrift.server.name"; + public static final String HBASE_THRIFT_SERVER_PORT = "hbase.thrift.server.port"; + + public static final String HBASE_THRIFT_CLIENT_BUIDLER_CLASS = + "hbase.thrift.client.builder.class"; } diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java index 6d11ac6f267..830ce527b14 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java @@ -141,6 +141,7 @@ import org.eclipse.jetty.util.thread.QueuedThreadPool; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; import org.apache.hbase.thirdparty.com.google.common.base.Joiner; import org.apache.hbase.thirdparty.com.google.common.base.Splitter; import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; @@ -277,6 +278,16 @@ public class ThriftServer extends Configured implements Tool { HbaseHandlerMetricsProxy.newInstance((Hbase.Iface) hbaseServiceHandler, metrics, conf)); } + @VisibleForTesting + public TServer getTserver() { + return tserver; + } + + @VisibleForTesting + public Server getHttpServer() { + return httpServer; + } + protected void printUsageAndExit(Options options, int exitCode) throws ExitCodeException { HelpFormatter formatter = new HelpFormatter(); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java index 4457b9f13c7..117165ec89c 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java @@ -38,7 +38,7 @@ import 
org.slf4j.LoggerFactory; * An AlreadyExists exceptions signals that a table with the specified * name already exists */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class AlreadyExists extends TException implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AlreadyExists"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java index f605286af16..27e3550c8e0 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java @@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory; /** * A BatchMutation object is used to apply a number of Mutations to a single row. 
*/ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class BatchMutation implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BatchMutation"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java index c0e885d5623..36b7e4f6c46 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java @@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory; * such as the number of versions, compression settings, etc. It is * used as input when creating a table or adding a column. 
*/ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class ColumnDescriptor implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ColumnDescriptor"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java index 54bb4ce2d3d..38707116636 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class Hbase { public interface Iface { diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/IOError.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/IOError.java index 09d076ca9ec..13f7b4ef096 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/IOError.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/IOError.java @@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory; * to the Hbase master or an Hbase region server. Also used to return * more general Hbase error conditions. 
*/ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class IOError extends TException implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("IOError"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/IllegalArgument.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/IllegalArgument.java index bfe43805ac5..df9fe41b26f 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/IllegalArgument.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/IllegalArgument.java @@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory; * An IllegalArgument exception indicates an illegal or invalid * argument was passed into a procedure. */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class IllegalArgument extends TException implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("IllegalArgument"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Mutation.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Mutation.java index 967e3d1c68e..7c9c439ca36 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Mutation.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/Mutation.java @@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory; /** * A Mutation object is used to either update or delete a column-value. 
*/ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class Mutation implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Mutation"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TAppend.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TAppend.java index 599f645d74e..46c0fa0d814 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TAppend.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TAppend.java @@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory; /** * An Append object is used to specify the parameters for performing the append operation. */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TAppend implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TAppend"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TCell.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TCell.java index c8f35768aa9..db31d9e11e6 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TCell.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TCell.java @@ -40,7 +40,7 @@ import org.slf4j.LoggerFactory; * the timestamp of a cell to a first-class value, making it easy to take * note of temporal data. Cell is used all the way from HStore up to HTable. 
*/ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TCell implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCell"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TColumn.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TColumn.java index 0ce1d75b9d3..1f7d34101bc 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TColumn.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TColumn.java @@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory; /** * Holds column name and the cell. */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TColumn implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumn"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TIncrement.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TIncrement.java index 6c0827bf953..12dbe730151 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TIncrement.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TIncrement.java @@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory; * For increments that are not incrementColumnValue * equivalents. 
*/ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TIncrement implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIncrement"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TRegionInfo.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TRegionInfo.java index d2d3e0eec32..3870cae590d 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TRegionInfo.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TRegionInfo.java @@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory; /** * A TRegionInfo contains information about an HTable region. */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TRegionInfo implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRegionInfo"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java index 6520007977d..da656875c2f 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java @@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory; /** * Holds row name and then a map of columns to cells. 
*/ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TRowResult implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRowResult"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java index fcc237d6c2e..0aabf0bf0be 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java @@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory; /** * A Scan object is used to specify scanner parameters when opening a scanner. */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TScan implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TScan"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java index a9ec6464ccc..519a16bf361 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java @@ -575,7 +575,7 @@ public class ThriftHBaseServiceHandler extends HBaseServiceHandler implements TH public List getTableDescriptorsByPattern(String regex, boolean includeSysTables) throws TIOError, TException { try { - Pattern pattern = Pattern.compile(regex); + 
Pattern pattern = (regex == null ? null : Pattern.compile(regex)); List tableDescriptors = connectionCache.getAdmin() .listTableDescriptors(pattern, includeSysTables); return tableDescriptorsFromHBase(tableDescriptors); @@ -600,7 +600,7 @@ public class ThriftHBaseServiceHandler extends HBaseServiceHandler implements TH public List getTableNamesByPattern(String regex, boolean includeSysTables) throws TIOError, TException { try { - Pattern pattern = Pattern.compile(regex); + Pattern pattern = (regex == null ? null : Pattern.compile(regex)); TableName[] tableNames = connectionCache.getAdmin() .listTableNames(pattern, includeSysTables); return tableNamesFromHBase(tableNames); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java index 63c70071031..3496726b8da 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java @@ -25,15 +25,19 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.NavigableSet; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellBuilderFactory; import org.apache.hadoop.hbase.CellBuilderType; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; +import org.apache.hadoop.hbase.ExtendedCellBuilder; +import org.apache.hadoop.hbase.ExtendedCellBuilderFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionLocation; +import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeepDeletedCells; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.PrivateCellUtil; @@ -47,6 +51,7 @@ import org.apache.hadoop.hbase.client.Delete; import 
org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Increment; +import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.OperationWithAttributes; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -55,14 +60,20 @@ import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Scan.ReadType; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; +import org.apache.hadoop.hbase.exceptions.DeserializationException; +import org.apache.hadoop.hbase.filter.Filter; +import org.apache.hadoop.hbase.filter.FilterBase; import org.apache.hadoop.hbase.filter.ParseFilter; +import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.security.visibility.Authorizations; import org.apache.hadoop.hbase.security.visibility.CellVisibility; import org.apache.hadoop.hbase.thrift2.generated.TAppend; +import org.apache.hadoop.hbase.thrift2.generated.TAuthorization; import org.apache.hadoop.hbase.thrift2.generated.TBloomFilterType; +import org.apache.hadoop.hbase.thrift2.generated.TCellVisibility; import org.apache.hadoop.hbase.thrift2.generated.TColumn; import org.apache.hadoop.hbase.thrift2.generated.TColumnFamilyDescriptor; import org.apache.hadoop.hbase.thrift2.generated.TColumnIncrement; @@ -179,6 +190,14 @@ public final class ThriftUtilities { out.addFamily(column.getFamily()); } } + if (in.isSetFilterBytes()) { + try { + Filter filter = FilterBase.parseFrom(in.getFilterBytes()); + out.setFilter(filter); + } catch (DeserializationException e) { + throw new RuntimeException(e); + } + } return out; } @@ -579,6 +598,80 @@ public final class ThriftUtilities { 
out.setReplicaId(in.getTargetReplicaId()); } + if (in.isSetFilterBytes()) { + try { + Filter filter = FilterBase.parseFrom(in.getFilterBytes()); + out.setFilter(filter); + } catch (DeserializationException e) { + throw new RuntimeException(e); + } + } + + return out; + } + + public static TScan scanFromHBase(Scan in) throws IOException { + TScan out = new TScan(); + out.setStartRow(in.getStartRow()); + out.setStopRow(in.getStopRow()); + out.setCaching(in.getCaching()); + out.setMaxVersions(in.getMaxVersions()); + for (Map.Entry> family : in.getFamilyMap().entrySet()) { + + if (family.getValue() != null && !family.getValue().isEmpty()) { + for (byte[] qualifier : family.getValue()) { + TColumn column = new TColumn(); + column.setFamily(family.getKey()); + column.setQualifier(qualifier); + out.addToColumns(column); + } + } else { + TColumn column = new TColumn(); + column.setFamily(family.getKey()); + out.addToColumns(column); + } + } + TTimeRange tTimeRange = new TTimeRange(); + tTimeRange.setMinStamp(in.getTimeRange().getMin()).setMaxStamp(in.getTimeRange().getMax()); + out.setTimeRange(tTimeRange); + out.setBatchSize(in.getBatch()); + + for (Map.Entry attribute : in.getAttributesMap().entrySet()) { + out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())), + ByteBuffer.wrap(attribute.getValue())); + } + + try { + Authorizations authorizations = in.getAuthorizations(); + if (authorizations != null) { + TAuthorization tAuthorization = new TAuthorization(); + tAuthorization.setLabels(authorizations.getLabels()); + out.setAuthorizations(tAuthorization); + } + } catch (DeserializationException e) { + throw new RuntimeException(e); + } + + out.setReversed(in.isReversed()); + out.setCacheBlocks(in.getCacheBlocks()); + out.setReadType(readTypeFromHBase(in.getReadType())); + out.setLimit(in.getLimit()); + out.setConsistency(consistencyFromHBase(in.getConsistency())); + out.setTargetReplicaId(in.getReplicaId()); + for (Map.Entry entry : 
in.getColumnFamilyTimeRange().entrySet()) { + if (entry.getValue() != null) { + TTimeRange timeRange = new TTimeRange(); + timeRange.setMinStamp(entry.getValue().getMin()).setMaxStamp(entry.getValue().getMax()); + out.putToColFamTimeRangeMap(ByteBuffer.wrap(entry.getKey()), timeRange); + } + } + if (in.getFilter() != null) { + try { + out.setFilterBytes(in.getFilter().toByteArray()); + } catch (IOException ioE) { + throw new RuntimeException(ioE); + } + } return out; } @@ -713,6 +806,15 @@ public final class ThriftUtilities { } } + private static TReadType readTypeFromHBase(ReadType readType) { + switch (readType) { + case DEFAULT: return TReadType.DEFAULT; + case STREAM: return TReadType.STREAM; + case PREAD: return TReadType.PREAD; + default: return TReadType.DEFAULT; + } + } + private static Consistency consistencyFromThrift(TConsistency tConsistency) { switch (tConsistency.getValue()) { case 1: return Consistency.STRONG; @@ -725,6 +827,15 @@ public final class ThriftUtilities { return TableName.valueOf(tableName.getNs(), tableName.getQualifier()); } + public static TableName[] tableNamesArrayFromThrift(List tableNames) { + TableName[] out = new TableName[tableNames.size()]; + int index = 0; + for (TTableName tableName : tableNames) { + out[index++] = tableNameFromThrift(tableName); + } + return out; + } + public static List tableNamesFromThrift(List tableNames) { List out = new ArrayList<>(tableNames.size()); for (TTableName tableName : tableNames) { @@ -740,6 +851,14 @@ public final class ThriftUtilities { return tableName; } + public static List tableNamesFromHBase(List in) { + List out = new ArrayList<>(in.size()); + for (TableName tableName : in) { + out.add(tableNameFromHBase(tableName)); + } + return out; + } + public static List tableNamesFromHBase(TableName[] in) { List out = new ArrayList<>(in.length); for (TableName tableName : in) { @@ -920,6 +1039,28 @@ public final class ThriftUtilities { return builder.build(); } + public static HTableDescriptor 
hTableDescriptorFromThrift(TTableDescriptor in) { + return new HTableDescriptor(tableDescriptorFromThrift(in)); + } + + public static HTableDescriptor[] hTableDescriptorsFromThrift(List in) { + HTableDescriptor[] out = new HTableDescriptor[in.size()]; + int index = 0; + for (TTableDescriptor tTableDescriptor : in) { + out[index++] = hTableDescriptorFromThrift(tTableDescriptor); + } + return out; + } + + + public static List tableDescriptorsFromThrift(List in) { + List out = new ArrayList<>(); + for (TTableDescriptor tableDescriptor : in) { + out.add(tableDescriptorFromThrift(tableDescriptor)); + } + return out; + } + private static TDurability durabilityFromHBase(Durability durability) { switch (durability) { case USE_DEFAULT: return TDurability.USE_DEFAULT; @@ -1038,4 +1179,300 @@ public final class ThriftUtilities { return out; } + + private static TConsistency consistencyFromHBase(Consistency consistency) { + switch (consistency) { + case STRONG: return TConsistency.STRONG; + case TIMELINE: return TConsistency.TIMELINE; + default: return TConsistency.STRONG; + } + } + + public static TGet getFromHBase(Get in) { + TGet out = new TGet(); + out.setRow(in.getRow()); + + TTimeRange tTimeRange = new TTimeRange(); + tTimeRange.setMaxStamp(in.getTimeRange().getMax()).setMinStamp(in.getTimeRange().getMin()); + out.setTimeRange(tTimeRange); + out.setMaxVersions(in.getMaxVersions()); + + for (Map.Entry attribute : in.getAttributesMap().entrySet()) { + out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())), + ByteBuffer.wrap(attribute.getValue())); + } + try { + Authorizations authorizations = in.getAuthorizations(); + if (authorizations != null) { + TAuthorization tAuthorization = new TAuthorization(); + tAuthorization.setLabels(authorizations.getLabels()); + out.setAuthorizations(tAuthorization); + } + } catch (DeserializationException e) { + throw new RuntimeException(e); + } + out.setConsistency(consistencyFromHBase(in.getConsistency())); + 
out.setTargetReplicaId(in.getReplicaId()); + out.setCacheBlocks(in.getCacheBlocks()); + out.setStoreLimit(in.getMaxResultsPerColumnFamily()); + out.setStoreOffset(in.getRowOffsetPerColumnFamily()); + out.setExistence_only(in.isCheckExistenceOnly()); + for (Map.Entry> family : in.getFamilyMap().entrySet()) { + + if (family.getValue() != null && !family.getValue().isEmpty()) { + for (byte[] qualifier : family.getValue()) { + TColumn column = new TColumn(); + column.setFamily(family.getKey()); + column.setQualifier(qualifier); + out.addToColumns(column); + } + } else { + TColumn column = new TColumn(); + column.setFamily(family.getKey()); + out.addToColumns(column); + } + } + if (in.getFilter() != null) { + try { + out.setFilterBytes(in.getFilter().toByteArray()); + } catch (IOException ioE) { + throw new RuntimeException(ioE); + } + } + return out; + } + + public static Cell toCell(ExtendedCellBuilder cellBuilder, byte[] row, TColumnValue columnValue) { + return cellBuilder.clear() + .setRow(row) + .setFamily(columnValue.getFamily()) + .setQualifier(columnValue.getQualifier()) + .setTimestamp(columnValue.getTimestamp()) + .setType(columnValue.getType()) + .setValue(columnValue.getValue()) + .setTags(columnValue.getTags()) + .build(); + } + + + + + + + + public static Result resultFromThrift(TResult in) { + if (in == null) { + return null; + } + if (!in.isSetColumnValues() || in.getColumnValues().isEmpty()){ + return in.isStale() ? 
EMPTY_RESULT_STALE : EMPTY_RESULT; + } + List cells = new ArrayList<>(in.getColumnValues().size()); + ExtendedCellBuilder builder = ExtendedCellBuilderFactory.create(CellBuilderType.SHALLOW_COPY); + for (TColumnValue columnValue : in.getColumnValues()) { + cells.add(toCell(builder, in.getRow(), columnValue)); + } + return Result.create(cells, null, in.isStale(), in.isPartial()); + } + + public static TPut putFromHBase(Put in) { + TPut out = new TPut(); + out.setRow(in.getRow()); + if (in.getTimestamp() != HConstants.LATEST_TIMESTAMP) { + out.setTimestamp(in.getTimestamp()); + } + if (in.getDurability() != Durability.USE_DEFAULT) { + out.setDurability(durabilityFromHBase(in.getDurability())); + } + for (Map.Entry> entry : in.getFamilyCellMap().entrySet()) { + byte[] family = entry.getKey(); + for (Cell cell : entry.getValue()) { + TColumnValue columnValue = new TColumnValue(); + columnValue.setFamily(family) + .setQualifier(CellUtil.cloneQualifier(cell)) + .setType(cell.getType().getCode()) + .setTimestamp(cell.getTimestamp()) + .setValue(CellUtil.cloneValue(cell)); + if (cell.getTagsLength() != 0) { + columnValue.setTags(CellUtil.cloneTags(cell)); + } + out.addToColumnValues(columnValue); + } + } + for (Map.Entry attribute : in.getAttributesMap().entrySet()) { + out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())), + ByteBuffer.wrap(attribute.getValue())); + } + try { + CellVisibility cellVisibility = in.getCellVisibility(); + if (cellVisibility != null) { + TCellVisibility tCellVisibility = new TCellVisibility(); + tCellVisibility.setExpression(cellVisibility.getExpression()); + out.setCellVisibility(tCellVisibility); + } + } catch (DeserializationException e) { + throw new RuntimeException(e); + } + return out; + } + + public static List putsFromHBase(List in) { + List out = new ArrayList<>(in.size()); + for (Put put : in) { + out.add(putFromHBase(put)); + } + return out; + } + + public static NamespaceDescriptor[] 
namespaceDescriptorsFromThrift( + List in) { + NamespaceDescriptor[] out = new NamespaceDescriptor[in.size()]; + int index = 0; + for (TNamespaceDescriptor descriptor : in) { + out[index++] = namespaceDescriptorFromThrift(descriptor); + } + return out; + } + + public static List deletesFromHBase(List in) { + List out = new ArrayList<>(in.size()); + for (Delete delete : in) { + out.add(deleteFromHBase(delete)); + } + return out; + } + + public static TAppend appendFromHBase(Append in) throws IOException { + TAppend out = new TAppend(); + out.setRow(in.getRow()); + + if (in.getDurability() != Durability.USE_DEFAULT) { + out.setDurability(durabilityFromHBase(in.getDurability())); + } + for (Map.Entry> entry : in.getFamilyCellMap().entrySet()) { + byte[] family = entry.getKey(); + for (Cell cell : entry.getValue()) { + TColumnValue columnValue = new TColumnValue(); + columnValue.setFamily(family) + .setQualifier(CellUtil.cloneQualifier(cell)) + .setType(cell.getType().getCode()) + .setTimestamp(cell.getTimestamp()) + .setValue(CellUtil.cloneValue(cell)); + if (cell.getTagsLength() != 0) { + columnValue.setTags(CellUtil.cloneTags(cell)); + } + out.addToColumns(columnValue); + } + } + for (Map.Entry attribute : in.getAttributesMap().entrySet()) { + out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())), + ByteBuffer.wrap(attribute.getValue())); + } + try { + CellVisibility cellVisibility = in.getCellVisibility(); + if (cellVisibility != null) { + TCellVisibility tCellVisibility = new TCellVisibility(); + tCellVisibility.setExpression(cellVisibility.getExpression()); + out.setCellVisibility(tCellVisibility); + } + } catch (DeserializationException e) { + throw new RuntimeException(e); + } + out.setReturnResults(in.isReturnResults()); + return out; + } + + public static TIncrement incrementFromHBase(Increment in) throws IOException { + TIncrement out = new TIncrement(); + out.setRow(in.getRow()); + + if (in.getDurability() != Durability.USE_DEFAULT) { + 
out.setDurability(durabilityFromHBase(in.getDurability())); + } + for (Map.Entry> entry : in.getFamilyCellMap().entrySet()) { + byte[] family = entry.getKey(); + for (Cell cell : entry.getValue()) { + TColumnIncrement columnValue = new TColumnIncrement(); + columnValue.setFamily(family).setQualifier(CellUtil.cloneQualifier(cell)); + columnValue.setAmount( + Bytes.toLong(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength())); + out.addToColumns(columnValue); + } + } + for (Map.Entry attribute : in.getAttributesMap().entrySet()) { + out.putToAttributes(ByteBuffer.wrap(Bytes.toBytes(attribute.getKey())), + ByteBuffer.wrap(attribute.getValue())); + } + try { + CellVisibility cellVisibility = in.getCellVisibility(); + if (cellVisibility != null) { + TCellVisibility tCellVisibility = new TCellVisibility(); + tCellVisibility.setExpression(cellVisibility.getExpression()); + out.setCellVisibility(tCellVisibility); + } + } catch (DeserializationException e) { + throw new RuntimeException(e); + } + out.setReturnResults(in.isReturnResults()); + return out; + } + + public static TRowMutations rowMutationsFromHBase(RowMutations in) { + TRowMutations tRowMutations = new TRowMutations(); + tRowMutations.setRow(in.getRow()); + for (Mutation mutation : in.getMutations()) { + TMutation tMutation = new TMutation(); + if (mutation instanceof Put) { + tMutation.setPut(ThriftUtilities.putFromHBase((Put)mutation)); + } else if (mutation instanceof Delete) { + tMutation.setDeleteSingle(ThriftUtilities.deleteFromHBase((Delete)mutation)); + } else { + throw new IllegalArgumentException( + "Only Put and Delete is supported in mutateRow, but muation=" + mutation); + } + tRowMutations.addToMutations(tMutation); + } + return tRowMutations; + } + + public static TCompareOp compareOpFromHBase(CompareOperator compareOp) { + switch (compareOp) { + case LESS: return TCompareOp.LESS; + case LESS_OR_EQUAL: return TCompareOp.LESS_OR_EQUAL; + case EQUAL: return TCompareOp.EQUAL; + case 
NOT_EQUAL: return TCompareOp.NOT_EQUAL; + case GREATER_OR_EQUAL: return TCompareOp.GREATER_OR_EQUAL; + case GREATER: return TCompareOp.GREATER; + case NO_OP: return TCompareOp.NO_OP; + default: return null; + } + } + public static List splitKeyFromHBase(byte[][] in) { + if (in == null || in.length == 0) { + return null; + } + List out = new ArrayList<>(in.length); + for (byte[] key : in) { + out.add(ByteBuffer.wrap(key)); + } + return out; + } + + public static Result[] resultsFromThrift(List in) { + Result[] out = new Result[in.size()]; + int index = 0; + for (TResult tResult : in) { + out[index++] = resultFromThrift(tResult); + } + return out; + } + + public static List getsFromHBase(List in) { + List out = new ArrayList<>(in.size()); + for (Get get : in) { + out.add(getFromHBase(get)); + } + return out; + } + } diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftAdmin.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftAdmin.java new file mode 100644 index 00000000000..d45a6db4537 --- /dev/null +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftAdmin.java @@ -0,0 +1,1405 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.thrift2.client; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.EnumSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Future; +import java.util.regex.Pattern; + +import org.apache.commons.lang3.NotImplementedException; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.CacheEvictionStats; +import org.apache.hadoop.hbase.ClusterMetrics; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.NamespaceNotFoundException; +import org.apache.hadoop.hbase.RegionMetrics; +import org.apache.hadoop.hbase.ServerName; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.TableNotFoundException; +import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; +import org.apache.hadoop.hbase.client.CompactType; +import org.apache.hadoop.hbase.client.CompactionState; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.RegionInfo; +import org.apache.hadoop.hbase.client.SnapshotDescription; +import org.apache.hadoop.hbase.client.SnapshotType; +import org.apache.hadoop.hbase.client.TableDescriptor; +import org.apache.hadoop.hbase.client.replication.TableCFs; +import org.apache.hadoop.hbase.client.security.SecurityCapability; +import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; +import org.apache.hadoop.hbase.quotas.QuotaFilter; +import org.apache.hadoop.hbase.quotas.QuotaRetriever; +import org.apache.hadoop.hbase.quotas.QuotaSettings; +import org.apache.hadoop.hbase.replication.ReplicationException; +import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; +import org.apache.hadoop.hbase.replication.ReplicationPeerDescription; +import 
org.apache.hadoop.hbase.replication.SyncReplicationState; +import org.apache.hadoop.hbase.thrift2.ThriftUtilities; +import org.apache.hadoop.hbase.thrift2.generated.TColumnFamilyDescriptor; +import org.apache.hadoop.hbase.thrift2.generated.THBaseService; +import org.apache.hadoop.hbase.thrift2.generated.TNamespaceDescriptor; +import org.apache.hadoop.hbase.thrift2.generated.TTableDescriptor; +import org.apache.hadoop.hbase.thrift2.generated.TTableName; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.thrift.TException; +import org.apache.thrift.transport.TTransport; +import org.apache.yetus.audience.InterfaceAudience; + +@InterfaceAudience.Private +public class ThriftAdmin implements Admin { + + private THBaseService.Client client; + private TTransport transport; + private int operationTimeout; + private Configuration conf; + + + public ThriftAdmin(THBaseService.Client client, TTransport tTransport, Configuration conf) { + this.client = client; + this.transport = tTransport; + this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, + HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT); + this.conf = conf; + } + + @Override + public int getOperationTimeout() { + return operationTimeout; + } + + @Override + public void abort(String why, Throwable e) { + + } + + @Override + public boolean isAborted() { + return false; + } + + @Override + public void close() throws IOException { + transport.close(); + } + + @Override + public Configuration getConfiguration() { + return conf; + } + + @Override + public boolean tableExists(TableName tableName) throws IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + try { + return client.tableExists(tTableName); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public Connection getConnection() { + throw new NotImplementedException("getConnection not supported in ThriftAdmin"); + } + + 
@Override + public HTableDescriptor[] listTables() throws IOException { + return listTables((String)null); + } + + @Override + public List listTableDescriptors() throws IOException { + return listTableDescriptors((Pattern) null); + } + + @Override + public HTableDescriptor[] listTables(Pattern pattern) throws IOException { + String regex = (pattern == null ? null : pattern.toString()); + return listTables(regex); + } + + @Override + public List listTableDescriptors(Pattern pattern) throws IOException { + return listTableDescriptors(pattern, false); + } + + @Override + public HTableDescriptor[] listTables(String regex) throws IOException { + return listTables(regex, false); + } + + @Override + public HTableDescriptor[] listTables(Pattern pattern, boolean includeSysTables) + throws IOException { + String regex = (pattern == null ? null : pattern.toString()); + return listTables(regex, includeSysTables); + + } + + @Override + public List listTableDescriptors(Pattern pattern, boolean includeSysTables) + throws IOException { + try { + String regex = (pattern == null ? 
null : pattern.toString()); + List tTableDescriptors = client + .getTableDescriptorsByPattern(regex, includeSysTables); + return ThriftUtilities.tableDescriptorsFromThrift(tTableDescriptors); + + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public HTableDescriptor[] listTables(String regex, boolean includeSysTables) throws IOException { + try { + List tTableDescriptors = client + .getTableDescriptorsByPattern(regex, includeSysTables); + return ThriftUtilities.hTableDescriptorsFromThrift(tTableDescriptors); + + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public TableName[] listTableNames() throws IOException { + return listTableNames((String)null); + } + + @Override + public TableName[] listTableNames(Pattern pattern) throws IOException { + return listTableNames(pattern, false); + } + + @Override + public TableName[] listTableNames(String regex) throws IOException { + return listTableNames(regex, false); + } + + @Override + public TableName[] listTableNames(Pattern pattern, boolean includeSysTables) throws IOException { + String regex = (pattern == null ? 
null : pattern.toString()); + return listTableNames(regex, includeSysTables); + } + + @Override + public TableName[] listTableNames(String regex, boolean includeSysTables) throws IOException { + try { + List tTableNames = client.getTableNamesByPattern(regex, includeSysTables); + return ThriftUtilities.tableNamesArrayFromThrift(tTableNames); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public HTableDescriptor getTableDescriptor(TableName tableName) + throws TableNotFoundException, IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + try { + TTableDescriptor tTableDescriptor = client.getTableDescriptor(tTableName); + return ThriftUtilities.hTableDescriptorFromThrift(tTableDescriptor); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public TableDescriptor getDescriptor(TableName tableName) + throws TableNotFoundException, IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + try { + TTableDescriptor tTableDescriptor = client.getTableDescriptor(tTableName); + return ThriftUtilities.tableDescriptorFromThrift(tTableDescriptor); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public HTableDescriptor[] listTableDescriptorsByNamespace(String name) throws IOException { + try { + List tTableDescriptors = client.getTableDescriptorsByNamespace(name); + return ThriftUtilities.hTableDescriptorsFromThrift(tTableDescriptors); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public List listTableDescriptorsByNamespace(byte[] name) throws IOException { + try { + List tTableDescriptors = client + .getTableDescriptorsByNamespace(Bytes.toString(name)); + return ThriftUtilities.tableDescriptorsFromThrift(tTableDescriptors); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public TableName[] listTableNamesByNamespace(String name) throws IOException { + try { + List 
tTableNames = client.getTableNamesByNamespace(name); + return ThriftUtilities.tableNamesArrayFromThrift(tTableNames); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void createTable(TableDescriptor desc) throws IOException { + createTable(desc, null); + } + + @Override + public void createTable(TableDescriptor desc, byte[] startKey, byte[] endKey, int numRegions) + throws IOException { + if(numRegions < 3) { + throw new IllegalArgumentException("Must create at least three regions"); + } else if(Bytes.compareTo(startKey, endKey) >= 0) { + throw new IllegalArgumentException("Start key must be smaller than end key"); + } + if (numRegions == 3) { + createTable(desc, new byte[][]{startKey, endKey}); + return; + } + byte [][] splitKeys = Bytes.split(startKey, endKey, numRegions - 3); + if(splitKeys == null || splitKeys.length != numRegions - 1) { + throw new IllegalArgumentException("Unable to split key range into enough regions"); + } + createTable(desc, splitKeys); + } + + @Override + public void createTable(TableDescriptor desc, byte[][] splitKeys) throws IOException { + TTableDescriptor tTableDescriptor = ThriftUtilities.tableDescriptorFromHBase(desc); + List splitKeyInBuffer = ThriftUtilities.splitKeyFromHBase(splitKeys); + try { + client.createTable(tTableDescriptor, splitKeyInBuffer); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void deleteTable(TableName tableName) throws IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + try { + client.deleteTable(tTableName); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void truncateTable(TableName tableName, boolean preserveSplits) throws IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + try { + client.truncateTable(tTableName, preserveSplits); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public 
void enableTable(TableName tableName) throws IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + try { + client.enableTable(tTableName); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void disableTable(TableName tableName) throws IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + try { + client.disableTable(tTableName); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public boolean isTableEnabled(TableName tableName) throws IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + try { + return client.isTableEnabled(tTableName); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public boolean isTableDisabled(TableName tableName) throws IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + try { + return client.isTableDisabled(tTableName); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public boolean isTableAvailable(TableName tableName) throws IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + try { + return client.isTableAvailable(tTableName); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public boolean isTableAvailable(TableName tableName, byte[][] splitKeys) throws IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + List splitKeyInBuffer = ThriftUtilities.splitKeyFromHBase(splitKeys); + try { + return client.isTableAvailableWithSplit(tTableName, splitKeyInBuffer); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void addColumnFamily(TableName tableName, ColumnFamilyDescriptor columnFamily) + throws IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + TColumnFamilyDescriptor tColumnFamilyDescriptor = ThriftUtilities + 
.columnFamilyDescriptorFromHBase(columnFamily); + try { + client.addColumnFamily(tTableName, tColumnFamilyDescriptor); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void deleteColumn(TableName tableName, byte[] columnFamily) throws IOException { + deleteColumnFamily(tableName, columnFamily); + } + + @Override + public void deleteColumnFamily(TableName tableName, byte[] columnFamily) throws IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + try { + client.deleteColumnFamily(tTableName, ByteBuffer.wrap(columnFamily)); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void modifyColumnFamily(TableName tableName, ColumnFamilyDescriptor columnFamily) + throws IOException { + TTableName tTableName = ThriftUtilities.tableNameFromHBase(tableName); + TColumnFamilyDescriptor tColumnFamilyDescriptor = ThriftUtilities + .columnFamilyDescriptorFromHBase(columnFamily); + try { + client.modifyColumnFamily(tTableName, tColumnFamilyDescriptor); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void modifyTable(TableName tableName, TableDescriptor td) throws IOException { + modifyTable(td); + } + + @Override + public void modifyTable(TableDescriptor td) throws IOException { + TTableDescriptor tTableDescriptor = ThriftUtilities + .tableDescriptorFromHBase(td); + try { + client.modifyTable(tTableDescriptor); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void modifyNamespace(NamespaceDescriptor descriptor) throws IOException { + TNamespaceDescriptor tNamespaceDescriptor = ThriftUtilities + .namespaceDescriptorFromHBase(descriptor); + try { + client.modifyNamespace(tNamespaceDescriptor); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void deleteNamespace(String name) throws IOException { + try { + client.deleteNamespace(name); + } catch (TException e) { + throw 
new IOException(e); + } + } + + @Override + public NamespaceDescriptor getNamespaceDescriptor(String name) + throws NamespaceNotFoundException, IOException { + try { + TNamespaceDescriptor tNamespaceDescriptor = client.getNamespaceDescriptor(name); + return ThriftUtilities.namespaceDescriptorFromThrift(tNamespaceDescriptor); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public NamespaceDescriptor[] listNamespaceDescriptors() throws IOException { + try { + List tNamespaceDescriptors = client.listNamespaceDescriptors(); + return ThriftUtilities.namespaceDescriptorsFromThrift(tNamespaceDescriptors); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void createNamespace(NamespaceDescriptor descriptor) throws IOException { + TNamespaceDescriptor tNamespaceDescriptor = ThriftUtilities + .namespaceDescriptorFromHBase(descriptor); + try { + client.createNamespace(tNamespaceDescriptor); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public boolean switchRpcThrottle(boolean enable) throws IOException { + throw new NotImplementedException("switchRpcThrottle by pattern not supported in ThriftAdmin"); + } + + @Override + public boolean isRpcThrottleEnabled() throws IOException { + throw new NotImplementedException( + "isRpcThrottleEnabled by pattern not supported in ThriftAdmin"); + } + + @Override + public HTableDescriptor[] disableTables(String regex) throws IOException { + throw new NotImplementedException("disableTables by pattern not supported in ThriftAdmin"); + } + + @Override + public HTableDescriptor[] disableTables(Pattern pattern) throws IOException { + throw new NotImplementedException("disableTables by pattern not supported in ThriftAdmin"); + } + + @Override + public HTableDescriptor[] enableTables(String regex) throws IOException { + throw new NotImplementedException("enableTables by pattern not supported in ThriftAdmin"); + } + + @Override + public 
HTableDescriptor[] enableTables(Pattern pattern) throws IOException { + throw new NotImplementedException("enableTables by pattern not supported in ThriftAdmin"); + } + + @Override + public HTableDescriptor[] deleteTables(String regex) throws IOException { + throw new NotImplementedException("deleteTables by pattern not supported in ThriftAdmin"); + } + + @Override + public HTableDescriptor[] deleteTables(Pattern pattern) throws IOException { + throw new NotImplementedException("deleteTables by pattern not supported in ThriftAdmin"); + + } + + @Override + public HTableDescriptor[] getTableDescriptorsByTableName(List tableNames) + throws IOException { + throw new NotImplementedException("getTableDescriptorsByTableName not supported in ThriftAdmin" + + ", use getDescriptor to get descriptors one by one"); + } + + @Override + public List listTableDescriptors(List tableNames) throws IOException { + throw new NotImplementedException("listTableDescriptors not supported in ThriftAdmin" + + ", use getDescriptor to get descriptors one by one"); + } + + @Override + public HTableDescriptor[] getTableDescriptors(List names) throws IOException { + throw new NotImplementedException("getTableDescriptors not supported in ThriftAdmin" + + ", use getDescriptor to get descriptors one by one"); + } + + @Override + public void closeRegion(String regionname, String serverName) { + throw new NotImplementedException("closeRegion not supported in ThriftAdmin"); + + } + + @Override + public void closeRegion(byte[] regionname, String serverName) { + throw new NotImplementedException("closeRegion not supported in ThriftAdmin"); + + } + + @Override + public boolean closeRegionWithEncodedRegionName(String encodedRegionName, String serverName) { + throw new NotImplementedException( + "closeRegionWithEncodedRegionName not supported in ThriftAdmin"); + } + + @Override + public void closeRegion(ServerName sn, HRegionInfo hri) { + throw new NotImplementedException("closeRegion not supported in 
ThriftAdmin"); + + } + + @Override + public List getOnlineRegions(ServerName sn) { + throw new NotImplementedException("getOnlineRegions not supported in ThriftAdmin"); + } + + @Override + public List getRegions(ServerName serverName) { + throw new NotImplementedException("getRegions not supported in ThriftAdmin"); + } + + @Override + public void flush(TableName tableName) { + throw new NotImplementedException("flush not supported in ThriftAdmin"); + + } + + @Override + public void flushRegion(byte[] regionName) { + throw new NotImplementedException("flushRegion not supported in ThriftAdmin"); + + } + + @Override + public void flushRegionServer(ServerName serverName) { + throw new NotImplementedException("flushRegionServer not supported in ThriftAdmin"); + + } + + @Override + public void compact(TableName tableName) { + throw new NotImplementedException("compact not supported in ThriftAdmin"); + + } + + @Override + public void compactRegion(byte[] regionName) { + throw new NotImplementedException("compactRegion not supported in ThriftAdmin"); + + } + + @Override + public void compact(TableName tableName, byte[] columnFamily) { + throw new NotImplementedException("compact not supported in ThriftAdmin"); + + } + + @Override + public void compactRegion(byte[] regionName, byte[] columnFamily) { + throw new NotImplementedException("compactRegion not supported in ThriftAdmin"); + + } + + @Override + public void compact(TableName tableName, CompactType compactType) { + throw new NotImplementedException("compact not supported in ThriftAdmin"); + + } + + @Override + public void compact(TableName tableName, byte[] columnFamily, CompactType compactType) { + throw new NotImplementedException("compact not supported in ThriftAdmin"); + + } + + @Override + public void majorCompact(TableName tableName) { + throw new NotImplementedException("majorCompact not supported in ThriftAdmin"); + + } + + @Override + public void majorCompactRegion(byte[] regionName) { + throw new 
NotImplementedException("majorCompactRegion not supported in ThriftAdmin"); + + } + + @Override + public void majorCompact(TableName tableName, byte[] columnFamily) { + throw new NotImplementedException("majorCompact not supported in ThriftAdmin"); + + } + + @Override + public void majorCompactRegion(byte[] regionName, byte[] columnFamily) { + throw new NotImplementedException("majorCompactRegion not supported in ThriftAdmin"); + + } + + @Override + public void majorCompact(TableName tableName, CompactType compactType) { + throw new NotImplementedException("majorCompact not supported in ThriftAdmin"); + + } + + @Override + public void majorCompact(TableName tableName, byte[] columnFamily, CompactType compactType) { + throw new NotImplementedException("majorCompact not supported in ThriftAdmin"); + + } + + @Override + public Map compactionSwitch(boolean switchState, + List serverNamesList) { + throw new NotImplementedException("compactionSwitch not supported in ThriftAdmin"); + } + + @Override + public void compactRegionServer(ServerName serverName) { + throw new NotImplementedException("compactRegionServer not supported in ThriftAdmin"); + + } + + @Override + public void majorCompactRegionServer(ServerName serverName) { + throw new NotImplementedException("majorCompactRegionServer not supported in ThriftAdmin"); + + } + + @Override + public void move(byte[] encodedRegionName, byte[] destServerName) { + throw new NotImplementedException("move not supported in ThriftAdmin"); + + } + + @Override + public void assign(byte[] regionName) { + throw new NotImplementedException("assign not supported in ThriftAdmin"); + + } + + @Override + public void unassign(byte[] regionName, boolean force) { + throw new NotImplementedException("unassign not supported in ThriftAdmin"); + + } + + @Override + public void offline(byte[] regionName) { + throw new NotImplementedException("offline not supported in ThriftAdmin"); + + } + + @Override + public boolean balancerSwitch(boolean 
onOrOff, boolean synchronous) { + throw new NotImplementedException("balancerSwitch not supported in ThriftAdmin"); + } + + @Override + public boolean balance() { + throw new NotImplementedException("balance not supported in ThriftAdmin"); + } + + @Override + public boolean balance(boolean force) { + throw new NotImplementedException("balance not supported in ThriftAdmin"); + } + + @Override + public boolean isBalancerEnabled() { + throw new NotImplementedException("isBalancerEnabled not supported in ThriftAdmin"); + } + + @Override + public CacheEvictionStats clearBlockCache(TableName tableName) { + throw new NotImplementedException("clearBlockCache not supported in ThriftAdmin"); + } + + @Override + public boolean normalize() { + throw new NotImplementedException("normalize not supported in ThriftAdmin"); + } + + @Override + public boolean isNormalizerEnabled() { + throw new NotImplementedException("isNormalizerEnabled not supported in ThriftAdmin"); + } + + @Override + public boolean normalizerSwitch(boolean on) { + throw new NotImplementedException("normalizerSwitch not supported in ThriftAdmin"); + } + + @Override + public boolean catalogJanitorSwitch(boolean onOrOff) { + throw new NotImplementedException("catalogJanitorSwitch not supported in ThriftAdmin"); + } + + @Override + public int runCatalogJanitor() { + throw new NotImplementedException("runCatalogJanitor not supported in ThriftAdmin"); + } + + @Override + public boolean isCatalogJanitorEnabled() { + throw new NotImplementedException("isCatalogJanitorEnabled not supported in ThriftAdmin"); + } + + @Override + public boolean cleanerChoreSwitch(boolean onOrOff) { + throw new NotImplementedException("cleanerChoreSwitch not supported in ThriftAdmin"); + } + + @Override + public boolean runCleanerChore() { + throw new NotImplementedException("runCleanerChore not supported in ThriftAdmin"); + } + + @Override + public boolean isCleanerChoreEnabled() { + throw new 
NotImplementedException("isCleanerChoreEnabled not supported in ThriftAdmin"); + } + + @Override + public void mergeRegions(byte[] nameOfRegionA, byte[] nameOfRegionB, boolean forcible) { + throw new NotImplementedException("mergeRegions not supported in ThriftAdmin"); + + } + + @Override + public Future mergeRegionsAsync(byte[] nameOfRegionA, byte[] nameOfRegionB, + boolean forcible) { + throw new NotImplementedException("mergeRegionsAsync not supported in ThriftAdmin"); + } + + @Override + public Future mergeRegionsAsync(byte[][] nameofRegionsToMerge, boolean forcible) { + throw new NotImplementedException("mergeRegionsAsync not supported in ThriftAdmin"); + } + + @Override + public void split(TableName tableName) { + throw new NotImplementedException("split not supported in ThriftAdmin"); + } + + @Override + public void splitRegion(byte[] regionName) { + throw new NotImplementedException("splitRegion not supported in ThriftAdmin"); + } + + @Override + public void split(TableName tableName, byte[] splitPoint) { + throw new NotImplementedException("split not supported in ThriftAdmin"); + } + + @Override + public void splitRegion(byte[] regionName, byte[] splitPoint) { + throw new NotImplementedException("splitRegion not supported in ThriftAdmin"); + } + + @Override + public Future splitRegionAsync(byte[] regionName, byte[] splitPoint) { + throw new NotImplementedException("splitRegionAsync not supported in ThriftAdmin"); + } + + @Override + public Future modifyTableAsync(TableName tableName, TableDescriptor td) { + throw new NotImplementedException("modifyTableAsync not supported in ThriftAdmin"); + } + + @Override + public Future modifyTableAsync(TableDescriptor td) { + throw new NotImplementedException("modifyTableAsync not supported in ThriftAdmin"); + } + + @Override + public void shutdown() { + throw new NotImplementedException("shutdown not supported in ThriftAdmin"); + + } + + @Override + public void stopMaster() { + throw new 
NotImplementedException("stopMaster not supported in ThriftAdmin"); + + } + + @Override + public boolean isMasterInMaintenanceMode() { + throw new NotImplementedException("isMasterInMaintenanceMode not supported in ThriftAdmin"); + } + + @Override + public void stopRegionServer(String hostnamePort) { + throw new NotImplementedException("stopRegionServer not supported in ThriftAdmin"); + + } + + @Override + public ClusterMetrics getClusterMetrics(EnumSet options) { + throw new NotImplementedException("getClusterMetrics not supported in ThriftAdmin"); + } + + @Override + public List getRegionMetrics(ServerName serverName, TableName tableName) { + throw new NotImplementedException("getRegionMetrics not supported in ThriftAdmin"); + } + + @Override + public Future createNamespaceAsync(NamespaceDescriptor descriptor) { + throw new NotImplementedException("createNamespaceAsync not supported in ThriftAdmin"); + } + + @Override + public Future modifyNamespaceAsync(NamespaceDescriptor descriptor) { + throw new NotImplementedException("modifyNamespaceAsync not supported in ThriftAdmin"); + } + + @Override + public List getTableRegions(TableName tableName) { + throw new NotImplementedException("getTableRegions not supported in ThriftAdmin"); + } + + @Override + public List getRegions(TableName tableName) { + throw new NotImplementedException("getRegions not supported in ThriftAdmin"); + } + + @Override + public boolean abortProcedure(long procId, boolean mayInterruptIfRunning) { + throw new NotImplementedException("abortProcedure not supported in ThriftAdmin"); + } + + @Override + public Future abortProcedureAsync(long procId, boolean mayInterruptIfRunning) { + throw new NotImplementedException("abortProcedureAsync not supported in ThriftAdmin"); + } + + @Override + public String getProcedures() { + throw new NotImplementedException("getProcedures not supported in ThriftAdmin"); + } + + @Override + public String getLocks() { + throw new NotImplementedException("getLocks not 
supported in ThriftAdmin"); + } + + @Override + public void rollWALWriter(ServerName serverName) { + throw new NotImplementedException("rollWALWriter not supported in ThriftAdmin"); + + } + + @Override + public CompactionState getCompactionState(TableName tableName) { + throw new NotImplementedException("getCompactionState not supported in ThriftAdmin"); + } + + @Override + public CompactionState getCompactionState(TableName tableName, CompactType compactType) { + throw new NotImplementedException("getCompactionState not supported in ThriftAdmin"); + } + + @Override + public CompactionState getCompactionStateForRegion(byte[] regionName) { + throw new NotImplementedException("getCompactionStateForRegion not supported in ThriftAdmin"); + } + + @Override + public long getLastMajorCompactionTimestamp(TableName tableName) { + throw new NotImplementedException( + "getLastMajorCompactionTimestamp not supported in ThriftAdmin"); + } + + @Override + public long getLastMajorCompactionTimestampForRegion(byte[] regionName) { + throw new NotImplementedException( + "getLastMajorCompactionTimestampForRegion not supported in ThriftAdmin"); + } + + @Override + public void snapshot(String snapshotName, TableName tableName) { + throw new NotImplementedException("snapshot not supported in ThriftAdmin"); + + } + + @Override + public void snapshot(byte[] snapshotName, TableName tableName) { + throw new NotImplementedException("snapshot not supported in ThriftAdmin"); + + } + + @Override + public void snapshot(String snapshotName, TableName tableName, SnapshotType type) { + throw new NotImplementedException("snapshot not supported in ThriftAdmin"); + + } + + @Override + public void snapshot(SnapshotDescription snapshot) { + throw new NotImplementedException("snapshot not supported in ThriftAdmin"); + + } + + @Override + public void snapshotAsync(SnapshotDescription snapshot) { + throw new NotImplementedException("snapshotAsync not supported in ThriftAdmin"); + + } + + @Override + public 
boolean isSnapshotFinished(SnapshotDescription snapshot) { + throw new NotImplementedException("isSnapshotFinished not supported in ThriftAdmin"); + } + + @Override + public void restoreSnapshot(byte[] snapshotName) { + throw new NotImplementedException("restoreSnapshot not supported in ThriftAdmin"); + + } + + @Override + public void restoreSnapshot(String snapshotName) { + throw new NotImplementedException("restoreSnapshot not supported in ThriftAdmin"); + + } + + @Override + public Future restoreSnapshotAsync(String snapshotName) { + throw new NotImplementedException("restoreSnapshotAsync not supported in ThriftAdmin"); + } + + @Override + public void restoreSnapshot(byte[] snapshotName, boolean takeFailSafeSnapshot) { + throw new NotImplementedException("restoreSnapshot not supported in ThriftAdmin"); + + } + + @Override + public void restoreSnapshot(String snapshotName, boolean takeFailSafeSnapshot) { + throw new NotImplementedException("restoreSnapshot not supported in ThriftAdmin"); + + } + + @Override + public void restoreSnapshot(String snapshotName, boolean takeFailSafeSnapshot, + boolean restoreAcl) { + throw new NotImplementedException("restoreSnapshot not supported in ThriftAdmin"); + + } + + @Override + public void cloneSnapshot(byte[] snapshotName, TableName tableName) { + throw new NotImplementedException("cloneSnapshot not supported in ThriftAdmin"); + + } + + @Override + public void cloneSnapshot(String snapshotName, TableName tableName, boolean restoreAcl) { + throw new NotImplementedException("cloneSnapshot not supported in ThriftAdmin"); + + } + + @Override + public void cloneSnapshot(String snapshotName, TableName tableName) { + throw new NotImplementedException("cloneSnapshot not supported in ThriftAdmin"); + + } + + @Override + public Future cloneSnapshotAsync(String snapshotName, TableName tableName) { + throw new NotImplementedException("cloneSnapshotAsync not supported in ThriftAdmin"); + } + + @Override + public void execProcedure(String 
signature, String instance, Map props) { + throw new NotImplementedException("execProcedure not supported in ThriftAdmin"); + + } + + @Override + public byte[] execProcedureWithReturn(String signature, String instance, + Map props) { + throw new NotImplementedException("execProcedureWithReturn not supported in ThriftAdmin"); + } + + @Override + public boolean isProcedureFinished(String signature, String instance, Map props) { + throw new NotImplementedException("isProcedureFinished not supported in ThriftAdmin"); + } + + @Override + public List listSnapshots() { + throw new NotImplementedException("listSnapshots not supported in ThriftAdmin"); + } + + @Override + public List listSnapshots(String regex) { + throw new NotImplementedException("listSnapshots not supported in ThriftAdmin"); + } + + @Override + public List listSnapshots(Pattern pattern) { + throw new NotImplementedException("listSnapshots not supported in ThriftAdmin"); + } + + @Override + public List listTableSnapshots(String tableNameRegex, + String snapshotNameRegex) { + throw new NotImplementedException("listTableSnapshots not supported in ThriftAdmin"); + } + + @Override + public List listTableSnapshots(Pattern tableNamePattern, + Pattern snapshotNamePattern) { + throw new NotImplementedException("listTableSnapshots not supported in ThriftAdmin"); + } + + @Override + public void deleteSnapshot(byte[] snapshotName) { + throw new NotImplementedException("deleteSnapshot not supported in ThriftAdmin"); + + } + + @Override + public void deleteSnapshot(String snapshotName) { + throw new NotImplementedException("deleteSnapshot not supported in ThriftAdmin"); + + } + + @Override + public void deleteSnapshots(String regex) { + throw new NotImplementedException("deleteSnapshots not supported in ThriftAdmin"); + + } + + @Override + public void deleteSnapshots(Pattern pattern) { + throw new NotImplementedException("deleteSnapshots not supported in ThriftAdmin"); + + } + + @Override + public void 
deleteTableSnapshots(String tableNameRegex, String snapshotNameRegex) { + throw new NotImplementedException("deleteTableSnapshots not supported in ThriftAdmin"); + + } + + @Override + public void deleteTableSnapshots(Pattern tableNamePattern, Pattern snapshotNamePattern) { + throw new NotImplementedException("deleteTableSnapshots not supported in ThriftAdmin"); + + } + + @Override + public void setQuota(QuotaSettings quota) { + throw new NotImplementedException("setQuota not supported in ThriftAdmin"); + + } + + @Override + public QuotaRetriever getQuotaRetriever(QuotaFilter filter) { + throw new NotImplementedException("getQuotaRetriever not supported in ThriftAdmin"); + } + + @Override + public List getQuota(QuotaFilter filter) { + throw new NotImplementedException("getQuota not supported in ThriftAdmin"); + } + + @Override + public CoprocessorRpcChannel coprocessorService() { + throw new NotImplementedException("coprocessorService not supported in ThriftAdmin"); + } + + @Override + public CoprocessorRpcChannel coprocessorService(ServerName serverName) { + throw new NotImplementedException("coprocessorService not supported in ThriftAdmin"); + } + + @Override + public void updateConfiguration(ServerName server) { + throw new NotImplementedException("updateConfiguration not supported in ThriftAdmin"); + + } + + @Override + public void updateConfiguration() { + throw new NotImplementedException("updateConfiguration not supported in ThriftAdmin"); + + } + + @Override + public List getSecurityCapabilities() { + throw new NotImplementedException("getSecurityCapabilities not supported in ThriftAdmin"); + } + + @Override + public boolean splitSwitch(boolean enabled, boolean synchronous) { + throw new NotImplementedException("splitSwitch not supported in ThriftAdmin"); + } + + @Override + public boolean mergeSwitch(boolean enabled, boolean synchronous) { + throw new NotImplementedException("mergeSwitch not supported in ThriftAdmin"); + } + + @Override + public boolean 
isSplitEnabled() { + throw new NotImplementedException("isSplitEnabled not supported in ThriftAdmin"); + } + + @Override + public boolean isMergeEnabled() { + throw new NotImplementedException("isMergeEnabled not supported in ThriftAdmin"); + } + + @Override + public void addReplicationPeer(String peerId, ReplicationPeerConfig peerConfig, boolean enabled) { + throw new NotImplementedException("addReplicationPeer not supported in ThriftAdmin"); + + } + + @Override + public Future addReplicationPeerAsync(String peerId, ReplicationPeerConfig peerConfig, + boolean enabled) { + throw new NotImplementedException("addReplicationPeerAsync not supported in ThriftAdmin"); + } + + @Override + public void removeReplicationPeer(String peerId) { + throw new NotImplementedException("removeReplicationPeer not supported in ThriftAdmin"); + + } + + @Override + public Future removeReplicationPeerAsync(String peerId) { + throw new NotImplementedException("removeReplicationPeerAsync not supported in ThriftAdmin"); + } + + @Override + public void enableReplicationPeer(String peerId) { + throw new NotImplementedException("enableReplicationPeer not supported in ThriftAdmin"); + + } + + @Override + public Future enableReplicationPeerAsync(String peerId) { + throw new NotImplementedException("enableReplicationPeerAsync not supported in ThriftAdmin"); + } + + @Override + public void disableReplicationPeer(String peerId) { + throw new NotImplementedException("disableReplicationPeer not supported in ThriftAdmin"); + + } + + @Override + public Future disableReplicationPeerAsync(String peerId) { + throw new NotImplementedException("disableReplicationPeerAsync not supported in ThriftAdmin"); + } + + @Override + public ReplicationPeerConfig getReplicationPeerConfig(String peerId) { + throw new NotImplementedException("getReplicationPeerConfig not supported in ThriftAdmin"); + } + + @Override + public void updateReplicationPeerConfig(String peerId, ReplicationPeerConfig peerConfig) { + throw new 
NotImplementedException("updateReplicationPeerConfig not supported in ThriftAdmin"); + + } + + @Override + public Future updateReplicationPeerConfigAsync(String peerId, + ReplicationPeerConfig peerConfig) { + throw new NotImplementedException( + "updateReplicationPeerConfigAsync not supported in ThriftAdmin"); + } + + @Override + public void appendReplicationPeerTableCFs(String id, Map> tableCfs) + throws ReplicationException, IOException { + throw new NotImplementedException("appendReplicationPeerTableCFs not supported in ThriftAdmin"); + + } + + @Override + public void removeReplicationPeerTableCFs(String id, Map> tableCfs) + throws ReplicationException, IOException { + throw new NotImplementedException("removeReplicationPeerTableCFs not supported in ThriftAdmin"); + + } + + @Override + public List listReplicationPeers() { + throw new NotImplementedException("listReplicationPeers not supported in ThriftAdmin"); + } + + @Override + public List listReplicationPeers(Pattern pattern) { + throw new NotImplementedException("listReplicationPeers not supported in ThriftAdmin"); + } + + @Override + public void transitReplicationPeerSyncReplicationState(String peerId, + SyncReplicationState state) { + throw new NotImplementedException( + "transitReplicationPeerSyncReplicationState not supported in ThriftAdmin"); + + } + + @Override + public Future transitReplicationPeerSyncReplicationStateAsync(String peerId, + SyncReplicationState state) { + throw new NotImplementedException( + "transitReplicationPeerSyncReplicationStateAsync not supported in ThriftAdmin"); + } + + @Override + public void decommissionRegionServers(List servers, boolean offload) { + throw new NotImplementedException("decommissionRegionServers not supported in ThriftAdmin"); + + } + + @Override + public List listDecommissionedRegionServers() { + throw new NotImplementedException( + "listDecommissionedRegionServers not supported in ThriftAdmin"); + } + + @Override + public void 
recommissionRegionServer(ServerName server, List encodedRegionNames) { + throw new NotImplementedException("recommissionRegionServer not supported in ThriftAdmin"); + + } + + @Override + public List listReplicatedTableCFs() { + throw new NotImplementedException("listReplicatedTableCFs not supported in ThriftAdmin"); + } + + @Override + public void enableTableReplication(TableName tableName) { + throw new NotImplementedException("enableTableReplication not supported in ThriftAdmin"); + + } + + @Override + public void disableTableReplication(TableName tableName) { + throw new NotImplementedException("disableTableReplication not supported in ThriftAdmin"); + + } + + @Override + public void clearCompactionQueues(ServerName serverName, Set queues) { + throw new NotImplementedException("clearCompactionQueues not supported in ThriftAdmin"); + + } + + @Override + public List clearDeadServers(List servers) { + throw new NotImplementedException("clearDeadServers not supported in ThriftAdmin"); + } + + @Override + public void cloneTableSchema(TableName tableName, TableName newTableName, + boolean preserveSplits) { + throw new NotImplementedException("cloneTableSchema not supported in ThriftAdmin"); + + } + + @Override + public Future createTableAsync(TableDescriptor desc, byte[][] splitKeys) { + throw new NotImplementedException("createTableAsync not supported in ThriftAdmin"); + } + + @Override + public Future deleteTableAsync(TableName tableName) { + throw new NotImplementedException("deleteTableAsync not supported in ThriftAdmin"); + } + + @Override + public Future truncateTableAsync(TableName tableName, boolean preserveSplits) { + throw new NotImplementedException("truncateTableAsync not supported in ThriftAdmin"); + } + + @Override + public Future enableTableAsync(TableName tableName) { + throw new NotImplementedException("enableTableAsync not supported in ThriftAdmin"); + } + + @Override + public Future disableTableAsync(TableName tableName) { + throw new 
NotImplementedException("disableTableAsync not supported in ThriftAdmin"); + } + + @Override + public Pair getAlterStatus(TableName tableName) { + throw new NotImplementedException("getAlterStatus not supported in ThriftAdmin"); + } + + @Override + public Pair getAlterStatus(byte[] tableName) { + throw new NotImplementedException("getAlterStatus not supported in ThriftAdmin"); + } + + @Override + public Future deleteColumnFamilyAsync(TableName tableName, byte[] columnFamily) { + throw new NotImplementedException("deleteColumnFamilyAsync not supported in ThriftAdmin"); + } + + @Override + public Future addColumnFamilyAsync(TableName tableName, + ColumnFamilyDescriptor columnFamily) { + throw new NotImplementedException("addColumnFamilyAsync not supported in ThriftAdmin"); + } + + @Override + public Future modifyColumnFamilyAsync(TableName tableName, + ColumnFamilyDescriptor columnFamily) { + throw new NotImplementedException("modifyColumnFamilyAsync not supported in ThriftAdmin"); + } + + @Override + public Future deleteNamespaceAsync(String name) { + throw new NotImplementedException("deleteNamespaceAsync not supported in ThriftAdmin"); + } +} diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftClientBuilder.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftClientBuilder.java new file mode 100644 index 00000000000..3c11c98b32a --- /dev/null +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftClientBuilder.java @@ -0,0 +1,37 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.thrift2.client; + +import java.io.IOException; + +import org.apache.hadoop.hbase.thrift2.generated.THBaseService; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.thrift.transport.TTransport; +import org.apache.yetus.audience.InterfaceAudience; + +@InterfaceAudience.Private +public abstract class ThriftClientBuilder { + protected ThriftConnection connection; + + public ThriftClientBuilder(ThriftConnection connection) { + this.connection = connection; + } + + public abstract Pair getClient() throws IOException; +} diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftConnection.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftConnection.java new file mode 100644 index 00000000000..a0f902a8116 --- /dev/null +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftConnection.java @@ -0,0 +1,322 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.thrift2.client; + +import static org.apache.hadoop.hbase.ipc.RpcClient.DEFAULT_SOCKET_TIMEOUT_CONNECT; +import static org.apache.hadoop.hbase.ipc.RpcClient.SOCKET_TIMEOUT_CONNECT; + +import java.io.IOException; +import java.lang.reflect.Constructor; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ExecutorService; + +import org.apache.commons.lang3.NotImplementedException; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.BufferedMutator; +import org.apache.hadoop.hbase.client.BufferedMutatorParams; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.RegionLocator; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.TableBuilder; +import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.thrift.Constants; +import org.apache.hadoop.hbase.thrift2.generated.THBaseService; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.http.client.HttpClient; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.utils.HttpClientUtils; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.thrift.protocol.TBinaryProtocol; +import org.apache.thrift.protocol.TCompactProtocol; +import org.apache.thrift.protocol.TProtocol; +import org.apache.thrift.transport.TFramedTransport; +import 
org.apache.thrift.transport.THttpClient; +import org.apache.thrift.transport.TSocket; +import org.apache.thrift.transport.TTransport; +import org.apache.thrift.transport.TTransportException; +import org.apache.yetus.audience.InterfaceAudience; + +import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; + +@InterfaceAudience.Private +public class ThriftConnection implements Connection { + private Configuration conf; + private User user; + // For HTTP protocol + private HttpClient httpClient; + private boolean httpClientCreated = false; + private boolean isClosed = false; + + private String host; + private int port; + private boolean isFramed = false; + private boolean isCompact = false; + + private ThriftClientBuilder clientBuilder; + + private int operationTimeout; + private int connectTimeout; + + public ThriftConnection(Configuration conf, ExecutorService pool, final User user) + throws IOException { + this.conf = conf; + this.user = user; + this.host = conf.get(Constants.HBASE_THRIFT_SERVER_NAME); + this.port = conf.getInt(Constants.HBASE_THRIFT_SERVER_PORT, -1); + Preconditions.checkArgument(port > 0); + Preconditions.checkArgument(host != null); + this.isFramed = conf.getBoolean(Constants.FRAMED_CONF_KEY, Constants.FRAMED_CONF_DEFAULT); + this.isCompact = conf.getBoolean(Constants.COMPACT_CONF_KEY, Constants.COMPACT_CONF_DEFAULT); + this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, + HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT); + this.connectTimeout = conf.getInt(SOCKET_TIMEOUT_CONNECT, DEFAULT_SOCKET_TIMEOUT_CONNECT); + + String className = conf.get(Constants.HBASE_THRIFT_CLIENT_BUIDLER_CLASS, + DefaultThriftClientBuilder.class.getName()); + try { + Class clazz = Class.forName(className); + Constructor constructor = clazz + .getDeclaredConstructor(ThriftConnection.class); + constructor.setAccessible(true); + clientBuilder = (ThriftClientBuilder) constructor.newInstance(this); + }catch (Exception e) { + throw 
new IOException(e); + } + } + + public synchronized void setHttpClient(HttpClient httpClient) { + this.httpClient = httpClient; + } + + @Override + public Configuration getConfiguration() { + return conf; + } + + public String getHost() { + return host; + } + + public int getPort() { + return port; + } + + public boolean isFramed() { + return isFramed; + } + + public boolean isCompact() { + return isCompact; + } + + public int getOperationTimeout() { + return operationTimeout; + } + + public int getConnectTimeout() { + return connectTimeout; + } + + public ThriftClientBuilder getClientBuilder() { + return clientBuilder; + } + + /** + * the default thrift client builder. + * One can extend the ThriftClientBuilder to builder custom client, implement + * features like authentication(hbase-examples/thrift/DemoClient) + * + */ + public static class DefaultThriftClientBuilder extends ThriftClientBuilder { + + @Override + public Pair getClient() throws IOException { + TSocket sock = new TSocket(connection.getHost(), connection.getPort()); + sock.setSocketTimeout(connection.getOperationTimeout()); + sock.setConnectTimeout(connection.getConnectTimeout()); + TTransport tTransport = sock; + if (connection.isFramed()) { + tTransport = new TFramedTransport(tTransport); + } + try { + sock.open(); + } catch (TTransportException e) { + throw new IOException(e); + } + TProtocol prot; + if (connection.isCompact()) { + prot = new TCompactProtocol(tTransport); + } else { + prot = new TBinaryProtocol(tTransport); + } + THBaseService.Client client = new THBaseService.Client(prot); + return new Pair<>(client, tTransport); + } + + public DefaultThriftClientBuilder(ThriftConnection connection) { + super(connection); + } + } + + /** + * the default thrift http client builder. 
+ * One can extend the ThriftClientBuilder to builder custom http client, implement + * features like authentication or 'DoAs'(hbase-examples/thrift/HttpDoAsClient) + * + */ + public static class HTTPThriftClientBuilder extends ThriftClientBuilder { + Map customHeader = new HashMap<>(); + + public HTTPThriftClientBuilder(ThriftConnection connection) { + super(connection); + } + + public void addCostumHeader(String key, String value) { + customHeader.put(key, value); + } + + @Override + public Pair getClient() throws IOException { + Preconditions.checkArgument(connection.getHost().startsWith("http"), + "http client host must start with http or https"); + String url = connection.getHost() + ":" + connection.getPort(); + try { + THttpClient httpClient = new THttpClient(url, connection.getHttpClient()); + for (Map.Entry header : customHeader.entrySet()) { + httpClient.setCustomHeader(header.getKey(), header.getValue()); + } + httpClient.open(); + TProtocol prot = new TBinaryProtocol(httpClient); + THBaseService.Client client = new THBaseService.Client(prot); + return new Pair<>(client, httpClient); + } catch (TTransportException e) { + throw new IOException(e); + } + + } + } + + /** + * Get a ThriftAdmin, ThriftAdmin is NOT thread safe + * @return a ThriftAdmin + * @throws IOException IOException + */ + @Override + public Admin getAdmin() throws IOException { + Pair client = clientBuilder.getClient(); + return new ThriftAdmin(client.getFirst(), client.getSecond(), conf); + } + + public synchronized HttpClient getHttpClient() { + if (httpClient != null) { + return httpClient; + } + HttpClientBuilder builder = HttpClientBuilder.create(); + RequestConfig.Builder requestBuilder = RequestConfig.custom(); + requestBuilder = requestBuilder.setConnectTimeout(getConnectTimeout()); + requestBuilder = requestBuilder.setConnectionRequestTimeout(getOperationTimeout()); + builder.setDefaultRequestConfig(requestBuilder.build()); + httpClient = builder.build(); + httpClientCreated = 
true; + return httpClient; + } + + @Override + public synchronized void close() throws IOException { + if (httpClient != null && httpClientCreated) { + HttpClientUtils.closeQuietly(httpClient); + } + isClosed = true; + } + + @Override + public boolean isClosed() { + return isClosed; + } + + /** + * Get a TableBuider to build ThriftTable, ThriftTable is NOT thread safe + * @return a TableBuilder + * @throws IOException IOException + */ + @Override + public TableBuilder getTableBuilder(TableName tableName, ExecutorService pool) { + return new TableBuilder() { + @Override + public TableBuilder setOperationTimeout(int timeout) { + return this; + } + + @Override + public TableBuilder setRpcTimeout(int timeout) { + return this; + } + + @Override + public TableBuilder setReadRpcTimeout(int timeout) { + return this; + } + + @Override + public TableBuilder setWriteRpcTimeout(int timeout) { + return this; + } + + @Override + public Table build() { + try { + Pair client = clientBuilder.getClient(); + return new ThriftTable(tableName, client.getFirst(), client.getSecond(), conf); + } catch (IOException ioE) { + throw new RuntimeException(ioE); + } + + } + }; + } + + @Override + public void abort(String why, Throwable e) { + + } + + @Override + public boolean isAborted() { + return false; + } + + @Override + public BufferedMutator getBufferedMutator(TableName tableName) throws IOException { + throw new NotImplementedException("batchCoprocessorService not supported in ThriftTable"); + } + + @Override + public BufferedMutator getBufferedMutator(BufferedMutatorParams params) throws IOException { + throw new NotImplementedException("batchCoprocessorService not supported in ThriftTable"); + } + + @Override + public RegionLocator getRegionLocator(TableName tableName) throws IOException { + throw new NotImplementedException("batchCoprocessorService not supported in ThriftTable"); + } +} diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftTable.java 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftTable.java new file mode 100644 index 00000000000..5c826b972cb --- /dev/null +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftTable.java @@ -0,0 +1,492 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.thrift2.client; + +import static org.apache.hadoop.hbase.thrift.Constants.HBASE_THRIFT_CLIENT_SCANNER_CACHING; +import static org.apache.hadoop.hbase.thrift.Constants.HBASE_THRIFT_CLIENT_SCANNER_CACHING_DEFAULT; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Queue; +import java.util.concurrent.TimeUnit; + +import org.apache.commons.lang3.NotImplementedException; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.CompareOperator; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Append; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Increment; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.Row; +import org.apache.hadoop.hbase.client.RowMutations; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.TableDescriptor; +import org.apache.hadoop.hbase.client.coprocessor.Batch; +import org.apache.hadoop.hbase.client.metrics.ScanMetrics; +import org.apache.hadoop.hbase.io.TimeRange; +import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; +import org.apache.hadoop.hbase.thrift2.ThriftUtilities; +import org.apache.hadoop.hbase.thrift2.generated.TAppend; +import org.apache.hadoop.hbase.thrift2.generated.TDelete; +import org.apache.hadoop.hbase.thrift2.generated.TGet; +import org.apache.hadoop.hbase.thrift2.generated.THBaseService; +import org.apache.hadoop.hbase.thrift2.generated.TIncrement; +import org.apache.hadoop.hbase.thrift2.generated.TPut; +import org.apache.hadoop.hbase.thrift2.generated.TResult; 
+import org.apache.hadoop.hbase.thrift2.generated.TRowMutations; +import org.apache.hadoop.hbase.thrift2.generated.TScan; +import org.apache.hadoop.hbase.thrift2.generated.TTableDescriptor; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.thrift.TException; +import org.apache.thrift.transport.TTransport; +import org.apache.yetus.audience.InterfaceAudience; + +import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; +import org.apache.hbase.thirdparty.com.google.common.primitives.Booleans; + +@InterfaceAudience.Private +public class ThriftTable implements Table { + + private TableName tableName; + private Configuration conf; + private TTransport tTransport; + private THBaseService.Client client; + private ByteBuffer tableNameInBytes; + private int operationTimeout; + + private final int scannerCaching; + + public ThriftTable(TableName tableName, THBaseService.Client client, TTransport tTransport, + Configuration conf) { + this.tableName = tableName; + this.tableNameInBytes = ByteBuffer.wrap(tableName.toBytes()); + this.conf = conf; + this.tTransport = tTransport; + this.client = client; + this.scannerCaching = conf.getInt(HBASE_THRIFT_CLIENT_SCANNER_CACHING, + HBASE_THRIFT_CLIENT_SCANNER_CACHING_DEFAULT); + this.operationTimeout = conf.getInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, + HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT); + + + } + + @Override + public TableName getName() { + return tableName; + } + + @Override + public Configuration getConfiguration() { + return conf; + } + + @Override + public TableDescriptor getDescriptor() throws IOException { + try { + TTableDescriptor tableDescriptor = client + .getTableDescriptor(ThriftUtilities.tableNameFromHBase(tableName)); + return ThriftUtilities.tableDescriptorFromThrift(tableDescriptor); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public boolean exists(Get get) throws IOException { + TGet tGet = ThriftUtilities.getFromHBase(get); + try { + 
return client.exists(tableNameInBytes, tGet); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public boolean[] exists(List gets) throws IOException { + List tGets = new ArrayList<>(); + for (Get get: gets) { + tGets.add(ThriftUtilities.getFromHBase(get)); + } + try { + List results = client.existsAll(tableNameInBytes, tGets); + return Booleans.toArray(results); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void batch(List actions, Object[] results) + throws IOException { + throw new IOException("Batch not supported in ThriftTable, use put(List puts), " + + "get(List gets) or delete(List deletes) respectively"); + + + } + + @Override + public void batchCallback(List actions, Object[] results, + Batch.Callback callback) throws IOException { + throw new IOException("BatchCallback not supported in ThriftTable, use put(List puts), " + + "get(List gets) or delete(List deletes) respectively"); + } + + @Override + public Result get(Get get) throws IOException { + TGet tGet = ThriftUtilities.getFromHBase(get); + try { + TResult tResult = client.get(tableNameInBytes, tGet); + return ThriftUtilities.resultFromThrift(tResult); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public Result[] get(List gets) throws IOException { + List tGets = ThriftUtilities.getsFromHBase(gets); + try { + List results = client.getMultiple(tableNameInBytes, tGets); + return ThriftUtilities.resultsFromThrift(results); + } catch (TException e) { + throw new IOException(e); + } + } + + /** + * A scanner to perform scan from thrift server + * getScannerResults is used in this scanner + */ + private class Scanner implements ResultScanner { + protected TScan scan; + protected Result lastResult = null; + protected final Queue cache = new ArrayDeque<>();; + + + public Scanner(Scan scan) throws IOException { + if (scan.getBatch() > 0) { + throw new IOException("Batch is not supported in Scanner"); + } + if 
(scan.getCaching() <= 0) { + scan.setCaching(scannerCaching); + } else if (scan.getCaching() == 1 && scan.isReversed()){ + // for reverse scan, we need to pass the last row to the next scanner + // we need caching number bigger than 1 + scan.setCaching(scan.getCaching() + 1); + } + this.scan = ThriftUtilities.scanFromHBase(scan); + } + + + @Override + public Result next() throws IOException { + if (cache.size() == 0) { + setupNextScanner(); + try { + List tResults = client + .getScannerResults(tableNameInBytes, scan, scan.getCaching()); + Result[] results = ThriftUtilities.resultsFromThrift(tResults); + boolean firstKey = true; + for (Result result : results) { + // If it is a reverse scan, we use the last result's key as the startkey, since there is + // no way to construct a closet rowkey smaller than the last result + // So when the results return, we must rule out the first result, since it has already + // returned to user. + if (firstKey) { + firstKey = false; + if (scan.isReversed() && lastResult != null) { + if (Bytes.equals(lastResult.getRow(), result.getRow())) { + continue; + } + } + } + cache.add(result); + lastResult = result; + } + } catch (TException e) { + throw new IOException(e); + } + } + + if (cache.size() > 0) { + return cache.poll(); + } else { + //scan finished + return null; + } + } + + @Override + public void close() { + } + + @Override + public boolean renewLease() { + throw new RuntimeException("renewLease() not supported"); + } + + @Override + public ScanMetrics getScanMetrics() { + throw new RuntimeException("getScanMetrics() not supported"); + } + + private void setupNextScanner() { + //if lastResult is null null, it means it is not the fist scan + if (lastResult!= null) { + byte[] lastRow = lastResult.getRow(); + if (scan.isReversed()) { + //for reverse scan, we can't find the closet row before this row + scan.setStartRow(lastRow); + } else { + scan.setStartRow(createClosestRowAfter(lastRow)); + } + } + } + + + /** + * Create the 
closest row after the specified row + */ + protected byte[] createClosestRowAfter(byte[] row) { + if (row == null) { + throw new RuntimeException("The passed row is null"); + } + return Arrays.copyOf(row, row.length + 1); + } + } + + @Override + public ResultScanner getScanner(Scan scan) throws IOException { + return new Scanner(scan); + } + + @Override + public ResultScanner getScanner(byte[] family) throws IOException { + Scan scan = new Scan(); + scan.addFamily(family); + return getScanner(scan); + } + + @Override + public ResultScanner getScanner(byte[] family, byte[] qualifier) throws IOException { + Scan scan = new Scan(); + scan.addColumn(family, qualifier); + return getScanner(scan); + } + + @Override + public void put(Put put) throws IOException { + TPut tPut = ThriftUtilities.putFromHBase(put); + try { + client.put(tableNameInBytes, tPut); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void put(List puts) throws IOException { + List tPuts = ThriftUtilities.putsFromHBase(puts); + try { + client.putMultiple(tableNameInBytes, tPuts); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void delete(Delete delete) throws IOException { + TDelete tDelete = ThriftUtilities.deleteFromHBase(delete); + try { + client.deleteSingle(tableNameInBytes, tDelete); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void delete(List deletes) throws IOException { + List tDeletes = ThriftUtilities.deletesFromHBase(deletes); + try { + client.deleteMultiple(tableNameInBytes, tDeletes); + } catch (TException e) { + throw new IOException(e); + } + } + + private class CheckAndMutateBuilderImpl implements CheckAndMutateBuilder { + + private final byte[] row; + private final byte[] family; + private byte[] qualifier; + private CompareOperator op; + private byte[] value; + + CheckAndMutateBuilderImpl(byte[] row, byte[] family) { + this.row = Preconditions.checkNotNull(row, "row 
is null"); + this.family = Preconditions.checkNotNull(family, "family is null"); + } + + @Override + public CheckAndMutateBuilder qualifier(byte[] qualifier) { + this.qualifier = Preconditions.checkNotNull(qualifier, "qualifier is null. Consider using" + + " an empty byte array, or just do not call this method if you want a null qualifier"); + return this; + } + + @Override + public CheckAndMutateBuilder timeRange(TimeRange timeRange) { + throw new NotImplementedException("timeRange not supported in ThriftTable"); + } + + @Override + public CheckAndMutateBuilder ifNotExists() { + this.op = CompareOperator.EQUAL; + this.value = null; + return this; + } + + @Override + public CheckAndMutateBuilder ifMatches(CompareOperator compareOp, byte[] value) { + this.op = Preconditions.checkNotNull(compareOp, "compareOp is null"); + this.value = Preconditions.checkNotNull(value, "value is null"); + return this; + } + + private void preCheck() { + Preconditions.checkNotNull(op, "condition is null. You need to specify the condition by" + + " calling ifNotExists/ifEquals/ifMatches before executing the request"); + } + + @Override + public boolean thenPut(Put put) throws IOException { + preCheck(); + RowMutations rowMutations = new RowMutations(put.getRow()); + rowMutations.add(put); + return checkAndMutate(row, family, qualifier, op, value, rowMutations); + } + + @Override + public boolean thenDelete(Delete delete) throws IOException { + preCheck(); + RowMutations rowMutations = new RowMutations(delete.getRow()); + rowMutations.add(delete); + return checkAndMutate(row, family, qualifier, op, value, rowMutations); + } + + @Override + public boolean thenMutate(RowMutations mutation) throws IOException { + preCheck(); + return checkAndMutate(row, family, qualifier, op, value, mutation); + } + } + + + @Override + public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOperator op, + byte[] value, RowMutations mutation) throws IOException { + try { + 
ByteBuffer valueBuffer = value == null? null : ByteBuffer.wrap(value); + return client.checkAndMutate(tableNameInBytes, ByteBuffer.wrap(row), ByteBuffer.wrap(family), + ByteBuffer.wrap(qualifier), ThriftUtilities.compareOpFromHBase(op), valueBuffer, + ThriftUtilities.rowMutationsFromHBase(mutation)); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) { + return new CheckAndMutateBuilderImpl(row, family); + } + + @Override + public void mutateRow(RowMutations rm) throws IOException { + TRowMutations tRowMutations = ThriftUtilities.rowMutationsFromHBase(rm); + try { + client.mutateRow(tableNameInBytes, tRowMutations); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public Result append(Append append) throws IOException { + TAppend tAppend = ThriftUtilities.appendFromHBase(append); + try { + TResult tResult = client.append(tableNameInBytes, tAppend); + return ThriftUtilities.resultFromThrift(tResult); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public Result increment(Increment increment) throws IOException { + TIncrement tIncrement = ThriftUtilities.incrementFromHBase(increment); + try { + TResult tResult = client.increment(tableNameInBytes, tIncrement); + return ThriftUtilities.resultFromThrift(tResult); + } catch (TException e) { + throw new IOException(e); + } + } + + @Override + public void close() throws IOException { + tTransport.close(); + } + + @Override + public long getRpcTimeout(TimeUnit unit) { + return unit.convert(operationTimeout, TimeUnit.MILLISECONDS); + } + + @Override + public long getReadRpcTimeout(TimeUnit unit) { + return unit.convert(operationTimeout, TimeUnit.MILLISECONDS); + } + + @Override + public long getWriteRpcTimeout(TimeUnit unit) { + return unit.convert(operationTimeout, TimeUnit.MILLISECONDS); + } + + @Override + public long getOperationTimeout(TimeUnit unit) { + return 
unit.convert(operationTimeout, TimeUnit.MILLISECONDS); + } + + @Override + public CoprocessorRpcChannel coprocessorService(byte[] row) { + throw new NotImplementedException("coprocessorService not supported in ThriftTable"); + } + +} diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAppend.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAppend.java index 1b9d6fc2318..d2aa0d3d6ec 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAppend.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAppend.java @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TAppend implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TAppend"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAuthorization.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAuthorization.java index 916dc0c8c1e..db5c916c54d 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAuthorization.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TAuthorization.java @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TAuthorization implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { 
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TAuthorization"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TCellVisibility.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TCellVisibility.java index e9af9fe6f04..bc0d513e766 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TCellVisibility.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TCellVisibility.java @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TCellVisibility implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCellVisibility"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumn.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumn.java index 11e3acede6e..87345391ed0 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumn.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumn.java @@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory; * in a HBase table by column family and optionally * a column qualifier and timestamp */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TColumn implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new 
org.apache.thrift.protocol.TStruct("TColumn"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnFamilyDescriptor.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnFamilyDescriptor.java index df4656fc865..233533398e0 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnFamilyDescriptor.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnFamilyDescriptor.java @@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory; * Thrift wrapper around * org.apache.hadoop.hbase.client.ColumnFamilyDescriptor */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TColumnFamilyDescriptor implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnFamilyDescriptor"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnIncrement.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnIncrement.java index 18c5ff21ced..08574c3d254 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnIncrement.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnIncrement.java @@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory; /** * Represents a single cell and the amount to increment it by */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TColumnIncrement implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new 
org.apache.thrift.protocol.TStruct("TColumnIncrement"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnValue.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnValue.java index 13551419d17..e2105bc624f 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnValue.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnValue.java @@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory; /** * Represents a single cell and its value. */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TColumnValue implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnValue"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java index cdce4fbd3b9..c3e1543f1f7 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java @@ -60,7 +60,7 @@ import org.slf4j.LoggerFactory; * by changing the durability. If you don't provide durability, it defaults to * column family's default setting for durability. 
*/ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TDelete implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TDelete"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java index b38d936d9f0..676275a7650 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java @@ -47,7 +47,7 @@ import org.slf4j.LoggerFactory; * If you specify a time range and a timestamp the range is ignored. * Timestamps on TColumns are ignored. */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TGet implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGet"); @@ -65,6 +65,7 @@ public class TGet implements org.apache.thrift.TBase, java.i private static final org.apache.thrift.protocol.TField STORE_LIMIT_FIELD_DESC = new org.apache.thrift.protocol.TField("storeLimit", org.apache.thrift.protocol.TType.I32, (short)12); private static final org.apache.thrift.protocol.TField STORE_OFFSET_FIELD_DESC = new org.apache.thrift.protocol.TField("storeOffset", org.apache.thrift.protocol.TType.I32, (short)13); private static final org.apache.thrift.protocol.TField EXISTENCE_ONLY_FIELD_DESC = new org.apache.thrift.protocol.TField("existence_only", org.apache.thrift.protocol.TType.BOOL, (short)14); + private static final 
org.apache.thrift.protocol.TField FILTER_BYTES_FIELD_DESC = new org.apache.thrift.protocol.TField("filterBytes", org.apache.thrift.protocol.TType.STRING, (short)15); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { @@ -90,6 +91,7 @@ public class TGet implements org.apache.thrift.TBase, java.i public int storeLimit; // optional public int storeOffset; // optional public boolean existence_only; // optional + public ByteBuffer filterBytes; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { @@ -110,7 +112,8 @@ public class TGet implements org.apache.thrift.TBase, java.i CACHE_BLOCKS((short)11, "cacheBlocks"), STORE_LIMIT((short)12, "storeLimit"), STORE_OFFSET((short)13, "storeOffset"), - EXISTENCE_ONLY((short)14, "existence_only"); + EXISTENCE_ONLY((short)14, "existence_only"), + FILTER_BYTES((short)15, "filterBytes"); private static final Map byName = new HashMap(); @@ -153,6 +156,8 @@ public class TGet implements org.apache.thrift.TBase, java.i return STORE_OFFSET; case 14: // EXISTENCE_ONLY return EXISTENCE_ONLY; + case 15: // FILTER_BYTES + return FILTER_BYTES; default: return null; } @@ -201,7 +206,7 @@ public class TGet implements org.apache.thrift.TBase, java.i private static final int __STOREOFFSET_ISSET_ID = 5; private static final int __EXISTENCE_ONLY_ISSET_ID = 6; private byte __isset_bitfield = 0; - private static final _Fields optionals[] = {_Fields.COLUMNS,_Fields.TIMESTAMP,_Fields.TIME_RANGE,_Fields.MAX_VERSIONS,_Fields.FILTER_STRING,_Fields.ATTRIBUTES,_Fields.AUTHORIZATIONS,_Fields.CONSISTENCY,_Fields.TARGET_REPLICA_ID,_Fields.CACHE_BLOCKS,_Fields.STORE_LIMIT,_Fields.STORE_OFFSET,_Fields.EXISTENCE_ONLY}; + private static final _Fields optionals[] = 
{_Fields.COLUMNS,_Fields.TIMESTAMP,_Fields.TIME_RANGE,_Fields.MAX_VERSIONS,_Fields.FILTER_STRING,_Fields.ATTRIBUTES,_Fields.AUTHORIZATIONS,_Fields.CONSISTENCY,_Fields.TARGET_REPLICA_ID,_Fields.CACHE_BLOCKS,_Fields.STORE_LIMIT,_Fields.STORE_OFFSET,_Fields.EXISTENCE_ONLY,_Fields.FILTER_BYTES}; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); @@ -236,6 +241,8 @@ public class TGet implements org.apache.thrift.TBase, java.i new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))); tmpMap.put(_Fields.EXISTENCE_ONLY, new org.apache.thrift.meta_data.FieldMetaData("existence_only", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); + tmpMap.put(_Fields.FILTER_BYTES, new org.apache.thrift.meta_data.FieldMetaData("filterBytes", org.apache.thrift.TFieldRequirementType.OPTIONAL, + new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TGet.class, metaDataMap); } @@ -288,6 +295,9 @@ public class TGet implements org.apache.thrift.TBase, java.i this.storeLimit = other.storeLimit; this.storeOffset = other.storeOffset; this.existence_only = other.existence_only; + if (other.isSetFilterBytes()) { + this.filterBytes = org.apache.thrift.TBaseHelper.copyBinary(other.filterBytes); + } } public TGet deepCopy() { @@ -317,6 +327,7 @@ public class TGet implements org.apache.thrift.TBase, java.i this.storeOffset = 0; setExistence_onlyIsSet(false); this.existence_only = false; + this.filterBytes = null; } public byte[] getRow() { @@ -702,6 +713,40 @@ public class TGet implements org.apache.thrift.TBase, java.i 
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __EXISTENCE_ONLY_ISSET_ID, value); } + public byte[] getFilterBytes() { + setFilterBytes(org.apache.thrift.TBaseHelper.rightSize(filterBytes)); + return filterBytes == null ? null : filterBytes.array(); + } + + public ByteBuffer bufferForFilterBytes() { + return org.apache.thrift.TBaseHelper.copyBinary(filterBytes); + } + + public TGet setFilterBytes(byte[] filterBytes) { + this.filterBytes = filterBytes == null ? (ByteBuffer)null : ByteBuffer.wrap(Arrays.copyOf(filterBytes, filterBytes.length)); + return this; + } + + public TGet setFilterBytes(ByteBuffer filterBytes) { + this.filterBytes = org.apache.thrift.TBaseHelper.copyBinary(filterBytes); + return this; + } + + public void unsetFilterBytes() { + this.filterBytes = null; + } + + /** Returns true if field filterBytes is set (has been assigned a value) and false otherwise */ + public boolean isSetFilterBytes() { + return this.filterBytes != null; + } + + public void setFilterBytesIsSet(boolean value) { + if (!value) { + this.filterBytes = null; + } + } + public void setFieldValue(_Fields field, Object value) { switch (field) { case ROW: @@ -816,6 +861,14 @@ public class TGet implements org.apache.thrift.TBase, java.i } break; + case FILTER_BYTES: + if (value == null) { + unsetFilterBytes(); + } else { + setFilterBytes((ByteBuffer)value); + } + break; + } } @@ -863,6 +916,9 @@ public class TGet implements org.apache.thrift.TBase, java.i case EXISTENCE_ONLY: return isExistence_only(); + case FILTER_BYTES: + return getFilterBytes(); + } throw new IllegalStateException(); } @@ -902,6 +958,8 @@ public class TGet implements org.apache.thrift.TBase, java.i return isSetStoreOffset(); case EXISTENCE_ONLY: return isSetExistence_only(); + case FILTER_BYTES: + return isSetFilterBytes(); } throw new IllegalStateException(); } @@ -1045,6 +1103,15 @@ public class TGet implements org.apache.thrift.TBase, java.i return false; } + boolean this_present_filterBytes = true 
&& this.isSetFilterBytes(); + boolean that_present_filterBytes = true && that.isSetFilterBytes(); + if (this_present_filterBytes || that_present_filterBytes) { + if (!(this_present_filterBytes && that_present_filterBytes)) + return false; + if (!this.filterBytes.equals(that.filterBytes)) + return false; + } + return true; } @@ -1122,6 +1189,11 @@ public class TGet implements org.apache.thrift.TBase, java.i if (present_existence_only) list.add(existence_only); + boolean present_filterBytes = true && (isSetFilterBytes()); + list.add(present_filterBytes); + if (present_filterBytes) + list.add(filterBytes); + return list.hashCode(); } @@ -1273,6 +1345,16 @@ public class TGet implements org.apache.thrift.TBase, java.i return lastComparison; } } + lastComparison = Boolean.valueOf(isSetFilterBytes()).compareTo(other.isSetFilterBytes()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetFilterBytes()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.filterBytes, other.filterBytes); + if (lastComparison != 0) { + return lastComparison; + } + } return 0; } @@ -1402,6 +1484,16 @@ public class TGet implements org.apache.thrift.TBase, java.i sb.append(this.existence_only); first = false; } + if (isSetFilterBytes()) { + if (!first) sb.append(", "); + sb.append("filterBytes:"); + if (this.filterBytes == null) { + sb.append("null"); + } else { + org.apache.thrift.TBaseHelper.toString(this.filterBytes, sb); + } + first = false; + } sb.append(")"); return sb.toString(); } @@ -1593,6 +1685,14 @@ public class TGet implements org.apache.thrift.TBase, java.i org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; + case 15: // FILTER_BYTES + if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { + struct.filterBytes = iprot.readBinary(); + struct.setFilterBytesIsSet(true); + } else { + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + } + break; default: 
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } @@ -1705,6 +1805,13 @@ public class TGet implements org.apache.thrift.TBase, java.i oprot.writeBool(struct.existence_only); oprot.writeFieldEnd(); } + if (struct.filterBytes != null) { + if (struct.isSetFilterBytes()) { + oprot.writeFieldBegin(FILTER_BYTES_FIELD_DESC); + oprot.writeBinary(struct.filterBytes); + oprot.writeFieldEnd(); + } + } oprot.writeFieldStop(); oprot.writeStructEnd(); } @@ -1763,7 +1870,10 @@ public class TGet implements org.apache.thrift.TBase, java.i if (struct.isSetExistence_only()) { optionals.set(12); } - oprot.writeBitSet(optionals, 13); + if (struct.isSetFilterBytes()) { + optionals.set(13); + } + oprot.writeBitSet(optionals, 14); if (struct.isSetColumns()) { { oprot.writeI32(struct.columns.size()); @@ -1816,6 +1926,9 @@ public class TGet implements org.apache.thrift.TBase, java.i if (struct.isSetExistence_only()) { oprot.writeBool(struct.existence_only); } + if (struct.isSetFilterBytes()) { + oprot.writeBinary(struct.filterBytes); + } } @Override @@ -1823,7 +1936,7 @@ public class TGet implements org.apache.thrift.TBase, java.i TTupleProtocol iprot = (TTupleProtocol) prot; struct.row = iprot.readBinary(); struct.setRowIsSet(true); - BitSet incoming = iprot.readBitSet(13); + BitSet incoming = iprot.readBitSet(14); if (incoming.get(0)) { { org.apache.thrift.protocol.TList _list27 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32()); @@ -1899,6 +2012,10 @@ public class TGet implements org.apache.thrift.TBase, java.i struct.existence_only = iprot.readBool(); struct.setExistence_onlyIsSet(true); } + if (incoming.get(13)) { + struct.filterBytes = iprot.readBinary(); + struct.setFilterBytesIsSet(true); + } } } diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java index bb5db4dd6e0..880af450d06 
100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class THBaseService { public interface Iface { @@ -30957,7 +30957,7 @@ public class THBaseService { public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); - tmpMap.put(_Fields.REGEX, new org.apache.thrift.meta_data.FieldMetaData("regex", org.apache.thrift.TFieldRequirementType.REQUIRED, + tmpMap.put(_Fields.REGEX, new org.apache.thrift.meta_data.FieldMetaData("regex", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.INCLUDE_SYS_TABLES, new org.apache.thrift.meta_data.FieldMetaData("includeSysTables", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); @@ -31223,9 +31223,6 @@ public class THBaseService { public void validate() throws org.apache.thrift.TException { // check for required fields - if (regex == null) { - throw new org.apache.thrift.protocol.TProtocolException("Required field 'regex' was not present! Struct: " + toString()); - } // alas, we cannot check 'includeSysTables' because it's a primitive and you chose the non-beans generator. 
// check for sub-struct validity } @@ -31325,17 +31322,27 @@ public class THBaseService { @Override public void write(org.apache.thrift.protocol.TProtocol prot, getTableDescriptorsByPattern_args struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; - oprot.writeString(struct.regex); oprot.writeBool(struct.includeSysTables); + BitSet optionals = new BitSet(); + if (struct.isSetRegex()) { + optionals.set(0); + } + oprot.writeBitSet(optionals, 1); + if (struct.isSetRegex()) { + oprot.writeString(struct.regex); + } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, getTableDescriptorsByPattern_args struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; - struct.regex = iprot.readString(); - struct.setRegexIsSet(true); struct.includeSysTables = iprot.readBool(); struct.setIncludeSysTablesIsSet(true); + BitSet incoming = iprot.readBitSet(1); + if (incoming.get(0)) { + struct.regex = iprot.readString(); + struct.setRegexIsSet(true); + } } } @@ -32845,7 +32852,7 @@ public class THBaseService { public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); - tmpMap.put(_Fields.REGEX, new org.apache.thrift.meta_data.FieldMetaData("regex", org.apache.thrift.TFieldRequirementType.REQUIRED, + tmpMap.put(_Fields.REGEX, new org.apache.thrift.meta_data.FieldMetaData("regex", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.INCLUDE_SYS_TABLES, new org.apache.thrift.meta_data.FieldMetaData("includeSysTables", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); @@ -33111,9 +33118,6 @@ public class THBaseService 
{ public void validate() throws org.apache.thrift.TException { // check for required fields - if (regex == null) { - throw new org.apache.thrift.protocol.TProtocolException("Required field 'regex' was not present! Struct: " + toString()); - } // alas, we cannot check 'includeSysTables' because it's a primitive and you chose the non-beans generator. // check for sub-struct validity } @@ -33213,17 +33217,27 @@ public class THBaseService { @Override public void write(org.apache.thrift.protocol.TProtocol prot, getTableNamesByPattern_args struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; - oprot.writeString(struct.regex); oprot.writeBool(struct.includeSysTables); + BitSet optionals = new BitSet(); + if (struct.isSetRegex()) { + optionals.set(0); + } + oprot.writeBitSet(optionals, 1); + if (struct.isSetRegex()) { + oprot.writeString(struct.regex); + } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, getTableNamesByPattern_args struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; - struct.regex = iprot.readString(); - struct.setRegexIsSet(true); struct.includeSysTables = iprot.readBool(); struct.setIncludeSysTablesIsSet(true); + BitSet incoming = iprot.readBitSet(1); + if (incoming.get(0)) { + struct.regex = iprot.readString(); + struct.setRegexIsSet(true); + } } } @@ -34733,7 +34747,7 @@ public class THBaseService { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.DESC, new org.apache.thrift.meta_data.FieldMetaData("desc", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TTableDescriptor.class))); - tmpMap.put(_Fields.SPLIT_KEYS, new org.apache.thrift.meta_data.FieldMetaData("splitKeys", org.apache.thrift.TFieldRequirementType.REQUIRED, + tmpMap.put(_Fields.SPLIT_KEYS, 
new org.apache.thrift.meta_data.FieldMetaData("splitKeys", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)))); metaDataMap = Collections.unmodifiableMap(tmpMap); @@ -35021,9 +35035,6 @@ public class THBaseService { if (desc == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'desc' was not present! Struct: " + toString()); } - if (splitKeys == null) { - throw new org.apache.thrift.protocol.TProtocolException("Required field 'splitKeys' was not present! Struct: " + toString()); - } // check for sub-struct validity if (desc != null) { desc.validate(); @@ -35141,11 +35152,18 @@ public class THBaseService { public void write(org.apache.thrift.protocol.TProtocol prot, createTable_args struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; struct.desc.write(oprot); - { - oprot.writeI32(struct.splitKeys.size()); - for (ByteBuffer _iter322 : struct.splitKeys) + BitSet optionals = new BitSet(); + if (struct.isSetSplitKeys()) { + optionals.set(0); + } + oprot.writeBitSet(optionals, 1); + if (struct.isSetSplitKeys()) { { - oprot.writeBinary(_iter322); + oprot.writeI32(struct.splitKeys.size()); + for (ByteBuffer _iter322 : struct.splitKeys) + { + oprot.writeBinary(_iter322); + } } } } @@ -35156,17 +35174,20 @@ public class THBaseService { struct.desc = new TTableDescriptor(); struct.desc.read(iprot); struct.setDescIsSet(true); - { - org.apache.thrift.protocol.TList _list323 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32()); - struct.splitKeys = new ArrayList(_list323.size); - ByteBuffer _elem324; - for (int _i325 = 0; _i325 < _list323.size; ++_i325) + BitSet incoming = iprot.readBitSet(1); + if (incoming.get(0)) { { - _elem324 = iprot.readBinary(); - struct.splitKeys.add(_elem324); + 
org.apache.thrift.protocol.TList _list323 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32()); + struct.splitKeys = new ArrayList(_list323.size); + ByteBuffer _elem324; + for (int _i325 = 0; _i325 < _list323.size; ++_i325) + { + _elem324 = iprot.readBinary(); + struct.splitKeys.add(_elem324); + } } + struct.setSplitKeysIsSet(true); } - struct.setSplitKeysIsSet(true); } } @@ -41194,7 +41215,7 @@ public class THBaseService { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.TABLE_NAME, new org.apache.thrift.meta_data.FieldMetaData("tableName", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TTableName.class))); - tmpMap.put(_Fields.SPLIT_KEYS, new org.apache.thrift.meta_data.FieldMetaData("splitKeys", org.apache.thrift.TFieldRequirementType.REQUIRED, + tmpMap.put(_Fields.SPLIT_KEYS, new org.apache.thrift.meta_data.FieldMetaData("splitKeys", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)))); metaDataMap = Collections.unmodifiableMap(tmpMap); @@ -41482,9 +41503,6 @@ public class THBaseService { if (tableName == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'tableName' was not present! Struct: " + toString()); } - if (splitKeys == null) { - throw new org.apache.thrift.protocol.TProtocolException("Required field 'splitKeys' was not present! 
Struct: " + toString()); - } // check for sub-struct validity if (tableName != null) { tableName.validate(); @@ -41602,11 +41620,18 @@ public class THBaseService { public void write(org.apache.thrift.protocol.TProtocol prot, isTableAvailableWithSplit_args struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; struct.tableName.write(oprot); - { - oprot.writeI32(struct.splitKeys.size()); - for (ByteBuffer _iter330 : struct.splitKeys) + BitSet optionals = new BitSet(); + if (struct.isSetSplitKeys()) { + optionals.set(0); + } + oprot.writeBitSet(optionals, 1); + if (struct.isSetSplitKeys()) { { - oprot.writeBinary(_iter330); + oprot.writeI32(struct.splitKeys.size()); + for (ByteBuffer _iter330 : struct.splitKeys) + { + oprot.writeBinary(_iter330); + } } } } @@ -41617,17 +41642,20 @@ public class THBaseService { struct.tableName = new TTableName(); struct.tableName.read(iprot); struct.setTableNameIsSet(true); - { - org.apache.thrift.protocol.TList _list331 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32()); - struct.splitKeys = new ArrayList(_list331.size); - ByteBuffer _elem332; - for (int _i333 = 0; _i333 < _list331.size; ++_i333) + BitSet incoming = iprot.readBitSet(1); + if (incoming.get(0)) { { - _elem332 = iprot.readBinary(); - struct.splitKeys.add(_elem332); + org.apache.thrift.protocol.TList _list331 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32()); + struct.splitKeys = new ArrayList(_list331.size); + ByteBuffer _elem332; + for (int _i333 = 0; _i333 < _list331.size; ++_i333) + { + _elem332 = iprot.readBinary(); + struct.splitKeys.add(_elem332); + } } + struct.setSplitKeysIsSet(true); } - struct.setSplitKeysIsSet(true); } } diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THRegionInfo.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THRegionInfo.java index 
b430d4d576e..f74764824b4 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THRegionInfo.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THRegionInfo.java @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class THRegionInfo implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("THRegionInfo"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THRegionLocation.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THRegionLocation.java index f64951e89b4..8263eb5e8a0 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THRegionLocation.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/THRegionLocation.java @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class THRegionLocation implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("THRegionLocation"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIOError.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIOError.java index ccb85148c3f..86d2715c025 100644 --- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIOError.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIOError.java @@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory; * to the HBase master or a HBase region server. Also used to return * more general HBase error conditions. */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TIOError extends TException implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIOError"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIllegalArgument.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIllegalArgument.java index 4436fc09dc9..9a913924340 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIllegalArgument.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIllegalArgument.java @@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory; * A TIllegalArgument exception indicates an illegal or invalid * argument was passed into a procedure. 
*/ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TIllegalArgument extends TException implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIllegalArgument"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java index 70d8eab7ed6..6eaac6828a3 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java @@ -41,7 +41,7 @@ import org.slf4j.LoggerFactory; * by changing the durability. If you don't provide durability, it defaults to * column family's default setting for durability. 
*/ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TIncrement implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIncrement"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TNamespaceDescriptor.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TNamespaceDescriptor.java index 1c0b41bc6a4..320e26d6adf 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TNamespaceDescriptor.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TNamespaceDescriptor.java @@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory; * Thrift wrapper around * org.apache.hadoop.hbase.NamespaceDescriptor */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TNamespaceDescriptor implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TNamespaceDescriptor"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java index 5739d78329e..f277d1fb558 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java @@ -46,7 +46,7 @@ import org.slf4j.LoggerFactory; * by changing the durability. If you don't provide durability, it defaults to * column family's default setting for durability. 
*/ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TPut implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TPut"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java index 3d40ea2fec1..3e668f2d823 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java @@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory; /** * if no Result is found, row and columnValues will not be set. */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TResult implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TResult"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TRowMutations.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TRowMutations.java index cc846f5182a..82c1ef779f4 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TRowMutations.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TRowMutations.java @@ -37,7 +37,7 @@ import org.slf4j.LoggerFactory; /** * A TRowMutations object is used to apply a number of Mutations to a single row. 
*/ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TRowMutations implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRowMutations"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java index 99046ccdc8c..6284b4af6af 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java @@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory; * Any timestamps in the columns are ignored but the colFamTimeRangeMap included, use timeRange to select by timestamp. * Max versions defaults to 1. */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TScan implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TScan"); @@ -59,6 +59,7 @@ public class TScan implements org.apache.thrift.TBase, jav private static final org.apache.thrift.protocol.TField LIMIT_FIELD_DESC = new org.apache.thrift.protocol.TField("limit", org.apache.thrift.protocol.TType.I32, (short)15); private static final org.apache.thrift.protocol.TField CONSISTENCY_FIELD_DESC = new org.apache.thrift.protocol.TField("consistency", org.apache.thrift.protocol.TType.I32, (short)16); private static final org.apache.thrift.protocol.TField TARGET_REPLICA_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("targetReplicaId", org.apache.thrift.protocol.TType.I32, 
(short)17); + private static final org.apache.thrift.protocol.TField FILTER_BYTES_FIELD_DESC = new org.apache.thrift.protocol.TField("filterBytes", org.apache.thrift.protocol.TType.STRING, (short)18); private static final Map, SchemeFactory> schemes = new HashMap, SchemeFactory>(); static { @@ -91,6 +92,7 @@ public class TScan implements org.apache.thrift.TBase, jav */ public TConsistency consistency; // optional public int targetReplicaId; // optional + public ByteBuffer filterBytes; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { @@ -118,7 +120,8 @@ public class TScan implements org.apache.thrift.TBase, jav * @see TConsistency */ CONSISTENCY((short)16, "consistency"), - TARGET_REPLICA_ID((short)17, "targetReplicaId"); + TARGET_REPLICA_ID((short)17, "targetReplicaId"), + FILTER_BYTES((short)18, "filterBytes"); private static final Map byName = new HashMap(); @@ -167,6 +170,8 @@ public class TScan implements org.apache.thrift.TBase, jav return CONSISTENCY; case 17: // TARGET_REPLICA_ID return TARGET_REPLICA_ID; + case 18: // FILTER_BYTES + return FILTER_BYTES; default: return null; } @@ -215,7 +220,7 @@ public class TScan implements org.apache.thrift.TBase, jav private static final int __LIMIT_ISSET_ID = 5; private static final int __TARGETREPLICAID_ISSET_ID = 6; private byte __isset_bitfield = 0; - private static final _Fields optionals[] = {_Fields.START_ROW,_Fields.STOP_ROW,_Fields.COLUMNS,_Fields.CACHING,_Fields.MAX_VERSIONS,_Fields.TIME_RANGE,_Fields.FILTER_STRING,_Fields.BATCH_SIZE,_Fields.ATTRIBUTES,_Fields.AUTHORIZATIONS,_Fields.REVERSED,_Fields.CACHE_BLOCKS,_Fields.COL_FAM_TIME_RANGE_MAP,_Fields.READ_TYPE,_Fields.LIMIT,_Fields.CONSISTENCY,_Fields.TARGET_REPLICA_ID}; + private static final _Fields optionals[] = 
{_Fields.START_ROW,_Fields.STOP_ROW,_Fields.COLUMNS,_Fields.CACHING,_Fields.MAX_VERSIONS,_Fields.TIME_RANGE,_Fields.FILTER_STRING,_Fields.BATCH_SIZE,_Fields.ATTRIBUTES,_Fields.AUTHORIZATIONS,_Fields.REVERSED,_Fields.CACHE_BLOCKS,_Fields.COL_FAM_TIME_RANGE_MAP,_Fields.READ_TYPE,_Fields.LIMIT,_Fields.CONSISTENCY,_Fields.TARGET_REPLICA_ID,_Fields.FILTER_BYTES}; public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); @@ -258,6 +263,8 @@ public class TScan implements org.apache.thrift.TBase, jav new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, TConsistency.class))); tmpMap.put(_Fields.TARGET_REPLICA_ID, new org.apache.thrift.meta_data.FieldMetaData("targetReplicaId", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))); + tmpMap.put(_Fields.FILTER_BYTES, new org.apache.thrift.meta_data.FieldMetaData("filterBytes", org.apache.thrift.TFieldRequirementType.OPTIONAL, + new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TScan.class, metaDataMap); } @@ -326,6 +333,9 @@ public class TScan implements org.apache.thrift.TBase, jav this.consistency = other.consistency; } this.targetReplicaId = other.targetReplicaId; + if (other.isSetFilterBytes()) { + this.filterBytes = org.apache.thrift.TBaseHelper.copyBinary(other.filterBytes); + } } public TScan deepCopy() { @@ -358,6 +368,7 @@ public class TScan implements org.apache.thrift.TBase, jav this.consistency = null; setTargetReplicaIdIsSet(false); this.targetReplicaId = 0; + this.filterBytes = null; } public byte[] getStartRow() { @@ -844,6 +855,40 @@ public class 
TScan implements org.apache.thrift.TBase, jav __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __TARGETREPLICAID_ISSET_ID, value); } + public byte[] getFilterBytes() { + setFilterBytes(org.apache.thrift.TBaseHelper.rightSize(filterBytes)); + return filterBytes == null ? null : filterBytes.array(); + } + + public ByteBuffer bufferForFilterBytes() { + return org.apache.thrift.TBaseHelper.copyBinary(filterBytes); + } + + public TScan setFilterBytes(byte[] filterBytes) { + this.filterBytes = filterBytes == null ? (ByteBuffer)null : ByteBuffer.wrap(Arrays.copyOf(filterBytes, filterBytes.length)); + return this; + } + + public TScan setFilterBytes(ByteBuffer filterBytes) { + this.filterBytes = org.apache.thrift.TBaseHelper.copyBinary(filterBytes); + return this; + } + + public void unsetFilterBytes() { + this.filterBytes = null; + } + + /** Returns true if field filterBytes is set (has been assigned a value) and false otherwise */ + public boolean isSetFilterBytes() { + return this.filterBytes != null; + } + + public void setFilterBytesIsSet(boolean value) { + if (!value) { + this.filterBytes = null; + } + } + public void setFieldValue(_Fields field, Object value) { switch (field) { case START_ROW: @@ -982,6 +1027,14 @@ public class TScan implements org.apache.thrift.TBase, jav } break; + case FILTER_BYTES: + if (value == null) { + unsetFilterBytes(); + } else { + setFilterBytes((ByteBuffer)value); + } + break; + } } @@ -1038,6 +1091,9 @@ public class TScan implements org.apache.thrift.TBase, jav case TARGET_REPLICA_ID: return getTargetReplicaId(); + case FILTER_BYTES: + return getFilterBytes(); + } throw new IllegalStateException(); } @@ -1083,6 +1139,8 @@ public class TScan implements org.apache.thrift.TBase, jav return isSetConsistency(); case TARGET_REPLICA_ID: return isSetTargetReplicaId(); + case FILTER_BYTES: + return isSetFilterBytes(); } throw new IllegalStateException(); } @@ -1253,6 +1311,15 @@ public class TScan implements org.apache.thrift.TBase, 
jav return false; } + boolean this_present_filterBytes = true && this.isSetFilterBytes(); + boolean that_present_filterBytes = true && that.isSetFilterBytes(); + if (this_present_filterBytes || that_present_filterBytes) { + if (!(this_present_filterBytes && that_present_filterBytes)) + return false; + if (!this.filterBytes.equals(that.filterBytes)) + return false; + } + return true; } @@ -1345,6 +1412,11 @@ public class TScan implements org.apache.thrift.TBase, jav if (present_targetReplicaId) list.add(targetReplicaId); + boolean present_filterBytes = true && (isSetFilterBytes()); + list.add(present_filterBytes); + if (present_filterBytes) + list.add(filterBytes); + return list.hashCode(); } @@ -1526,6 +1598,16 @@ public class TScan implements org.apache.thrift.TBase, jav return lastComparison; } } + lastComparison = Boolean.valueOf(isSetFilterBytes()).compareTo(other.isSetFilterBytes()); + if (lastComparison != 0) { + return lastComparison; + } + if (isSetFilterBytes()) { + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.filterBytes, other.filterBytes); + if (lastComparison != 0) { + return lastComparison; + } + } return 0; } @@ -1687,6 +1769,16 @@ public class TScan implements org.apache.thrift.TBase, jav sb.append(this.targetReplicaId); first = false; } + if (isSetFilterBytes()) { + if (!first) sb.append(", "); + sb.append("filterBytes:"); + if (this.filterBytes == null) { + sb.append("null"); + } else { + org.apache.thrift.TBaseHelper.toString(this.filterBytes, sb); + } + first = false; + } sb.append(")"); return sb.toString(); } @@ -1912,6 +2004,14 @@ public class TScan implements org.apache.thrift.TBase, jav org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; + case 18: // FILTER_BYTES + if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { + struct.filterBytes = iprot.readBinary(); + struct.setFilterBytesIsSet(true); + } else { + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); + 
} + break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } @@ -2055,6 +2155,13 @@ public class TScan implements org.apache.thrift.TBase, jav oprot.writeI32(struct.targetReplicaId); oprot.writeFieldEnd(); } + if (struct.filterBytes != null) { + if (struct.isSetFilterBytes()) { + oprot.writeFieldBegin(FILTER_BYTES_FIELD_DESC); + oprot.writeBinary(struct.filterBytes); + oprot.writeFieldEnd(); + } + } oprot.writeFieldStop(); oprot.writeStructEnd(); } @@ -2124,7 +2231,10 @@ public class TScan implements org.apache.thrift.TBase, jav if (struct.isSetTargetReplicaId()) { optionals.set(16); } - oprot.writeBitSet(optionals, 17); + if (struct.isSetFilterBytes()) { + optionals.set(17); + } + oprot.writeBitSet(optionals, 18); if (struct.isSetStartRow()) { oprot.writeBinary(struct.startRow); } @@ -2196,12 +2306,15 @@ public class TScan implements org.apache.thrift.TBase, jav if (struct.isSetTargetReplicaId()) { oprot.writeI32(struct.targetReplicaId); } + if (struct.isSetFilterBytes()) { + oprot.writeBinary(struct.filterBytes); + } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, TScan struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; - BitSet incoming = iprot.readBitSet(17); + BitSet incoming = iprot.readBitSet(18); if (incoming.get(0)) { struct.startRow = iprot.readBinary(); struct.setStartRowIsSet(true); @@ -2305,6 +2418,10 @@ public class TScan implements org.apache.thrift.TBase, jav struct.targetReplicaId = iprot.readI32(); struct.setTargetReplicaIdIsSet(true); } + if (incoming.get(17)) { + struct.filterBytes = iprot.readBinary(); + struct.setFilterBytesIsSet(true); + } } } diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TServerName.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TServerName.java index e7e0372de28..a19503becb1 100644 --- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TServerName.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TServerName.java @@ -34,7 +34,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"}) -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TServerName implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TServerName"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTableDescriptor.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTableDescriptor.java index 8e53bdfa9c0..288709e4d3b 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTableDescriptor.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTableDescriptor.java @@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory; * Thrift wrapper around * org.apache.hadoop.hbase.client.TableDescriptor */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TTableDescriptor implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTableDescriptor"); diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTableName.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTableName.java index cec268a14a3..21b0bffb107 100644 --- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTableName.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTableName.java @@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory; * Thrift wrapper around * org.apache.hadoop.hbase.TableName */ -@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2018-12-28") +@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-01-03") public class TTableName implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTableName"); @@ -54,7 +54,7 @@ public class TTableName implements org.apache.thrift.TBase metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); - tmpMap.put(_Fields.NS, new org.apache.thrift.meta_data.FieldMetaData("ns", org.apache.thrift.TFieldRequirementType.REQUIRED, + tmpMap.put(_Fields.NS, new org.apache.thrift.meta_data.FieldMetaData("ns", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true))); tmpMap.put(_Fields.QUALIFIER, new org.apache.thrift.meta_data.FieldMetaData("qualifier", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true))); @@ -143,11 +144,9 @@ public class TTableName implements org.apache.thrift.TBase, java.io.Serializable, Cloneable, Comparable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTimeRange"); diff --git a/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift b/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift index c1b94ef0e85..ef2dae50982 100644 --- 
a/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift +++ b/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift @@ -146,6 +146,7 @@ struct TGet { 12: optional i32 storeLimit 13: optional i32 storeOffset 14: optional bool existence_only + 15: optional binary filterBytes } @@ -262,6 +263,8 @@ struct TScan { 15: optional i32 limit 16: optional TConsistency consistency 17: optional i32 targetReplicaId + 18: optional binary filterBytes + } /** @@ -402,7 +405,7 @@ enum TKeepDeletedCells { */ struct TTableName { /** namespace name */ - 1: required binary ns + 1: optional binary ns /** tablename */ 2: required binary qualifier } @@ -828,7 +831,7 @@ service THBaseService { **/ list getTableDescriptorsByPattern( /** The regular expression to match against */ - 1: required string regex + 1: optional string regex /** set to false if match only against userspace tables */ 2: required bool includeSysTables ) throws (1: TIOError io) @@ -848,7 +851,7 @@ service THBaseService { **/ list getTableNamesByPattern( /** The regular expression to match against */ - 1: required string regex + 1: optional string regex /** set to false if match only against userspace tables */ 2: required bool includeSysTables ) throws (1: TIOError io) @@ -871,7 +874,7 @@ service THBaseService { /** table descriptor for table */ 1: required TTableDescriptor desc /** rray of split keys for the initial regions of the table */ - 2: required list splitKeys + 2: optional list splitKeys ) throws (1: TIOError io) /** @@ -946,7 +949,7 @@ service THBaseService { /** the tablename to check */ 1: required TTableName tableName /** keys to check if the table has been created with all split keys */ - 2: required list splitKeys + 2: optional list splitKeys ) throws (1: TIOError io) /** diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftConnection.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftConnection.java new 
file mode 100644 index 00000000000..158361958f1 --- /dev/null +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftConnection.java @@ -0,0 +1,841 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.thrift2; + +import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_INFO_SERVER_PORT; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.CompareOperator; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.TableName; +import 
org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.ClusterConnection; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.Delete; +import org.apache.hadoop.hbase.client.Durability; +import org.apache.hadoop.hbase.client.Get; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.TableDescriptor; +import org.apache.hadoop.hbase.client.TableDescriptorBuilder; +import org.apache.hadoop.hbase.filter.ColumnValueFilter; +import org.apache.hadoop.hbase.filter.FilterList; +import org.apache.hadoop.hbase.filter.PrefixFilter; +import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; +import org.apache.hadoop.hbase.testclassification.MediumTests; +import org.apache.hadoop.hbase.testclassification.RestTests; +import org.apache.hadoop.hbase.thrift.Constants; +import org.apache.hadoop.hbase.thrift2.client.ThriftConnection; +import org.apache.hadoop.hbase.util.Bytes; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Category({ RestTests.class, MediumTests.class}) + +public class TestThriftConnection { + private static final Logger LOG = + LoggerFactory.getLogger(TestThriftConnection.class); + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestThriftConnection.class); + + private static final byte[] FAMILYA = Bytes.toBytes("fa"); + private static final byte[] FAMILYB = 
Bytes.toBytes("fb"); + private static final byte[] FAMILYC = Bytes.toBytes("fc"); + private static final byte[] FAMILYD = Bytes.toBytes("fd"); + + private static final byte[] ROW_1 = Bytes.toBytes("testrow1"); + private static final byte[] ROW_2 = Bytes.toBytes("testrow2"); + private static final byte[] ROW_3 = Bytes.toBytes("testrow3"); + private static final byte[] ROW_4 = Bytes.toBytes("testrow4"); + + private static final byte[] QUALIFIER_1 = Bytes.toBytes("1"); + private static final byte[] QUALIFIER_2 = Bytes.toBytes("2"); + private static final byte[] VALUE_1 = Bytes.toBytes("testvalue1"); + private static final byte[] VALUE_2 = Bytes.toBytes("testvalue2"); + + private static final long ONE_HOUR = 60 * 60 * 1000; + private static final long TS_2 = System.currentTimeMillis(); + private static final long TS_1 = TS_2 - ONE_HOUR; + + + protected static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); + + protected static ThriftServer thriftServer; + + protected static ThriftServer thriftHttpServer; + + protected static int thriftPort; + protected static int httpPort; + + protected static Connection thriftConnection; + protected static Connection thriftHttpConnection; + + private static Admin thriftAdmin; + + private static ThriftServer startThriftServer(int port, boolean useHttp) { + Configuration thriftServerConf = HBaseConfiguration.create(TEST_UTIL.getConfiguration()); + thriftServerConf.setInt(Constants.PORT_CONF_KEY, port); + if (useHttp) { + thriftServerConf.setBoolean(Constants.USE_HTTP_CONF_KEY, true); + } + ThriftServer server = new ThriftServer(thriftServerConf); + Thread thriftServerThread = new Thread(() -> { + try{ + server.run(); + } catch (Exception t) { + LOG.error("Thrift Server failed", t); + } + }); + thriftServerThread.setDaemon(true); + thriftServerThread.start(); + if (useHttp) { + TEST_UTIL.waitFor(10000, () -> server.getHttpServer() != null); + } else { + TEST_UTIL.waitFor(10000, () -> server.getTserver() != null); + } + 
return server; + } + + private static Connection createConnection(int port, boolean useHttp) throws IOException { + Configuration conf = HBaseConfiguration.create(TEST_UTIL.getConfiguration()); + conf.set(ClusterConnection.HBASE_CLIENT_CONNECTION_IMPL, + ThriftConnection.class.getName()); + if (useHttp) { + conf.set(Constants.HBASE_THRIFT_CLIENT_BUIDLER_CLASS, + ThriftConnection.HTTPThriftClientBuilder.class.getName()); + } + String host = HConstants.LOCALHOST; + if (useHttp) { + host = "http://" + host; + } + conf.set(Constants.HBASE_THRIFT_SERVER_NAME, host); + conf.setInt(Constants.HBASE_THRIFT_SERVER_PORT, port); + return ConnectionFactory.createConnection(conf); + } + + + @BeforeClass + public static void setUp() throws Exception { + // Do not start info server + TEST_UTIL.getConfiguration().setInt(THRIFT_INFO_SERVER_PORT , -1); + TEST_UTIL.startMiniCluster(); + thriftPort = HBaseTestingUtility.randomFreePort(); + httpPort = HBaseTestingUtility.randomFreePort(); + // Start a thrift server + thriftServer = startThriftServer(thriftPort, false); + // Start a HTTP thrift server + thriftHttpServer = startThriftServer(httpPort, true); + thriftConnection = createConnection(thriftPort, false); + thriftHttpConnection = createConnection(httpPort, true); + thriftAdmin = thriftConnection.getAdmin(); + LOG.info("TS_1=" + TS_1); + LOG.info("TS_2=" + TS_2); + + } + + @AfterClass + public static void shutdown() throws Exception { + if (thriftAdmin != null) { + thriftAdmin.close(); + } + if (thriftHttpConnection != null) { + thriftHttpConnection.close(); + } + if (thriftConnection != null) { + thriftConnection.close(); + } + if (thriftHttpServer != null) { + thriftHttpServer.stop(); + } + TEST_UTIL.shutdownMiniCluster(); + } + + @Test + public void testThrfitAdmin() throws Exception { + testThriftAdmin(thriftConnection, "testThrfitAdminNamesapce", "testThrfitAdminTable"); + testThriftAdmin(thriftHttpConnection, "testThrfitHttpAdminNamesapce", + "testThrfitHttpAdminTable"); + }
+ + @Test + public void testGet() throws Exception { + testGet(thriftConnection, "testGetTable"); + testGet(thriftHttpConnection, "testGetHttpTable"); + + } + + public void testGet(Connection connection, String tableName) throws IOException { + createTable(thriftAdmin, tableName); + try (Table table = connection.getTable(TableName.valueOf(tableName))){ + Get get = new Get(ROW_1); + Result result = table.get(get); + byte[] value1 = result.getValue(FAMILYA, QUALIFIER_1); + byte[] value2 = result.getValue(FAMILYB, QUALIFIER_2); + assertNotNull(value1); + assertTrue(Bytes.equals(VALUE_1, value1)); + assertNull(value2); + + get = new Get(ROW_1); + get.addFamily(FAMILYC); + result = table.get(get); + value1 = result.getValue(FAMILYA, QUALIFIER_1); + value2 = result.getValue(FAMILYB, QUALIFIER_2); + assertNull(value1); + assertNull(value2); + + get = new Get(ROW_1); + get.addColumn(FAMILYA, QUALIFIER_1); + get.addColumn(FAMILYB, QUALIFIER_2); + result = table.get(get); + value1 = result.getValue(FAMILYA, QUALIFIER_1); + value2 = result.getValue(FAMILYB, QUALIFIER_2); + assertNotNull(value1); + assertTrue(Bytes.equals(VALUE_1, value1)); + assertNull(value2); + + get = new Get(ROW_2); + result = table.get(get); + value1 = result.getValue(FAMILYA, QUALIFIER_1); + value2 = result.getValue(FAMILYB, QUALIFIER_2); + assertNotNull(value1); + assertTrue(Bytes.equals(VALUE_2, value1)); // @TS_2 + assertNotNull(value2); + assertTrue(Bytes.equals(VALUE_2, value2)); + + get = new Get(ROW_2); + get.addFamily(FAMILYA); + result = table.get(get); + value1 = result.getValue(FAMILYA, QUALIFIER_1); + value2 = result.getValue(FAMILYB, QUALIFIER_2); + assertNotNull(value1); + assertTrue(Bytes.equals(VALUE_2, value1)); // @TS_2 + assertNull(value2); + + get = new Get(ROW_2); + get.addColumn(FAMILYA, QUALIFIER_1); + get.addColumn(FAMILYB, QUALIFIER_2); + result = table.get(get); + value1 = result.getValue(FAMILYA, QUALIFIER_1); + value2 = result.getValue(FAMILYB, QUALIFIER_2); + 
assertNotNull(value1); + assertTrue(Bytes.equals(VALUE_2, value1)); // @TS_2 + assertNotNull(value2); + assertTrue(Bytes.equals(VALUE_2, value2)); + + // test timestamp + + get = new Get(ROW_2); + get.addFamily(FAMILYA); + get.addFamily(FAMILYB); + get.setTimestamp(TS_1); + result = table.get(get); + value1 = result.getValue(FAMILYA, QUALIFIER_1); + value2 = result.getValue(FAMILYB, QUALIFIER_2); + assertNotNull(value1); + assertTrue(Bytes.equals(VALUE_1, value1)); // @TS_1 + assertNull(value2); + + // test timerange + + get = new Get(ROW_2); + get.addFamily(FAMILYA); + get.addFamily(FAMILYB); + get.setTimeRange(0, TS_1 + 1); + result = table.get(get); + value1 = result.getValue(FAMILYA, QUALIFIER_1); + value2 = result.getValue(FAMILYB, QUALIFIER_2); + assertNotNull(value1); + assertTrue(Bytes.equals(VALUE_1, value1)); // @TS_1 + assertNull(value2); + + // test maxVersions + + get = new Get(ROW_2); + get.addFamily(FAMILYA); + get.setMaxVersions(2); + result = table.get(get); + int count = 0; + for (Cell kv: result.listCells()) { + if (CellUtil.matchingFamily(kv, FAMILYA) && TS_1 == kv.getTimestamp()) { + assertTrue(CellUtil.matchingValue(kv, VALUE_1)); // @TS_1 + count++; + } + if (CellUtil.matchingFamily(kv, FAMILYA) && TS_2 == kv.getTimestamp()) { + assertTrue(CellUtil.matchingValue(kv, VALUE_2)); // @TS_2 + count++; + } + } + assertEquals(2, count); + } + + } + + @Test + public void testMultiGet() throws Exception { + testMultiGet(thriftConnection, "testMultiGetTable"); + testMultiGet(thriftHttpConnection, "testMultiGetHttpTable"); + } + + public void testMultiGet(Connection connection, String tableName) throws Exception { + createTable(thriftAdmin, tableName); + try (Table table = connection.getTable(TableName.valueOf(tableName))){ + ArrayList gets = new ArrayList<>(2); + gets.add(new Get(ROW_1)); + gets.add(new Get(ROW_2)); + Result[] results = table.get(gets); + assertNotNull(results); + assertEquals(2, results.length); + assertEquals(1, results[0].size()); + 
assertEquals(2, results[1].size()); + + //Test Versions + gets = new ArrayList<>(2); + Get g = new Get(ROW_1); + g.setMaxVersions(3); + gets.add(g); + Get get2 = new Get(ROW_2); + get2.setMaxVersions(3); + gets.add(get2); + results = table.get(gets); + assertNotNull(results); + assertEquals(2, results.length); + assertEquals(1, results[0].size()); + assertEquals(3, results[1].size()); + + gets = new ArrayList<>(1); + gets.add(new Get(Bytes.toBytes("RESALLYREALLYNOTTHERE"))); + results = table.get(gets); + assertNotNull(results); + assertTrue(results[0].isEmpty()); + + gets = new ArrayList<>(3); + gets.add(new Get(Bytes.toBytes("RESALLYREALLYNOTTHERE"))); + gets.add(new Get(ROW_1)); + gets.add(new Get(ROW_2)); + results = table.get(gets); + assertNotNull(results); + assertEquals(3, results.length); + assertTrue(results[0].isEmpty()); + } + + } + + @Test + public void testPut() throws Exception { + testPut(thriftConnection, "testPutTable"); + testPut(thriftHttpConnection, "testPutHttpTable"); + } + + public void testPut(Connection connection, String tableName) throws IOException { + createTable(thriftAdmin, tableName); + try (Table table = connection.getTable(TableName.valueOf(tableName))){ + Put put = new Put(ROW_3); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + table.put(put); + + Get get = new Get(ROW_3); + get.addFamily(FAMILYA); + Result result = table.get(get); + byte[] value = result.getValue(FAMILYA, QUALIFIER_1); + assertNotNull(value); + assertTrue(Bytes.equals(VALUE_1, value)); + + // multiput + + List puts = new ArrayList<>(3); + put = new Put(ROW_3); + put.addColumn(FAMILYB, QUALIFIER_2, VALUE_2); + puts.add(put); + put = new Put(ROW_4); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + puts.add(put); + put = new Put(ROW_4); + put.addColumn(FAMILYB, QUALIFIER_2, VALUE_2); + puts.add(put); + table.put(puts); + + get = new Get(ROW_3); + get.addFamily(FAMILYB); + result = table.get(get); + value = result.getValue(FAMILYB, QUALIFIER_2); + 
assertNotNull(value); + assertTrue(Bytes.equals(VALUE_2, value)); + get = new Get(ROW_4); + result = table.get(get); + value = result.getValue(FAMILYA, QUALIFIER_1); + assertNotNull(value); + assertTrue(Bytes.equals(VALUE_1, value)); + value = result.getValue(FAMILYB, QUALIFIER_2); + assertNotNull(value); + assertTrue(Bytes.equals(VALUE_2, value)); + } + } + + @Test + public void testDelete() throws Exception { + testDelete(thriftConnection, "testDeleteTable"); + testDelete(thriftHttpConnection, "testDeleteHttpTable"); + } + + public void testDelete(Connection connection, String tableName) throws IOException { + createTable(thriftAdmin, tableName); + try (Table table = connection.getTable(TableName.valueOf(tableName))){ + Put put = new Put(ROW_3); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + put.addColumn(FAMILYB, QUALIFIER_2, VALUE_2); + put.addColumn(FAMILYC, QUALIFIER_1, VALUE_1); + put.addColumn(FAMILYC, QUALIFIER_2, VALUE_2); + table.put(put); + + Get get = new Get(ROW_3); + get.addFamily(FAMILYA); + get.addFamily(FAMILYB); + get.addFamily(FAMILYC); + Result result = table.get(get); + byte[] value1 = result.getValue(FAMILYA, QUALIFIER_1); + byte[] value2 = result.getValue(FAMILYB, QUALIFIER_2); + byte[] value3 = result.getValue(FAMILYC, QUALIFIER_1); + byte[] value4 = result.getValue(FAMILYC, QUALIFIER_2); + assertNotNull(value1); + assertTrue(Bytes.equals(VALUE_1, value1)); + assertNotNull(value2); + assertTrue(Bytes.equals(VALUE_2, value2)); + assertNotNull(value3); + assertTrue(Bytes.equals(VALUE_1, value3)); + assertNotNull(value4); + assertTrue(Bytes.equals(VALUE_2, value4)); + + Delete delete = new Delete(ROW_3); + delete.addColumn(FAMILYB, QUALIFIER_2); + table.delete(delete); + + get = new Get(ROW_3); + get.addFamily(FAMILYA); + get.addFamily(FAMILYB); + result = table.get(get); + value1 = result.getValue(FAMILYA, QUALIFIER_1); + value2 = result.getValue(FAMILYB, QUALIFIER_2); + assertNotNull(value1); + assertTrue(Bytes.equals(VALUE_1, value1)); + 
assertNull(value2); + + delete = new Delete(ROW_3); + delete.setTimestamp(1L); + table.delete(delete); + + get = new Get(ROW_3); + get.addFamily(FAMILYA); + get.addFamily(FAMILYB); + result = table.get(get); + value1 = result.getValue(FAMILYA, QUALIFIER_1); + value2 = result.getValue(FAMILYB, QUALIFIER_2); + assertNotNull(value1); + assertTrue(Bytes.equals(VALUE_1, value1)); + assertNull(value2); + + // Delete column family from row + delete = new Delete(ROW_3); + delete.addFamily(FAMILYC); + table.delete(delete); + + get = new Get(ROW_3); + get.addFamily(FAMILYC); + result = table.get(get); + value3 = result.getValue(FAMILYC, QUALIFIER_1); + value4 = result.getValue(FAMILYC, QUALIFIER_2); + assertNull(value3); + assertNull(value4); + + delete = new Delete(ROW_3); + table.delete(delete); + + get = new Get(ROW_3); + get.addFamily(FAMILYA); + get.addFamily(FAMILYB); + result = table.get(get); + value1 = result.getValue(FAMILYA, QUALIFIER_1); + value2 = result.getValue(FAMILYB, QUALIFIER_2); + assertNull(value1); + assertNull(value2); + } + + } + + @Test + public void testScanner() throws Exception { + testScanner(thriftConnection, "testScannerTable"); + testScanner(thriftHttpConnection, "testScannerHttpTable"); + } + + public void testScanner(Connection connection, String tableName) throws IOException { + createTable(thriftAdmin, tableName); + try (Table table = connection.getTable(TableName.valueOf(tableName))){ + List puts = new ArrayList<>(4); + Put put = new Put(ROW_1); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + puts.add(put); + put = new Put(ROW_2); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + puts.add(put); + put = new Put(ROW_3); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + puts.add(put); + put = new Put(ROW_4); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + puts.add(put); + table.put(puts); + + ResultScanner scanner = table.getScanner(new Scan()); + + Result[] results = scanner.next(1); + assertNotNull(results); + assertEquals(1, 
results.length); + assertTrue(Bytes.equals(ROW_1, results[0].getRow())); + + Result result = scanner.next(); + assertNotNull(result); + assertTrue(Bytes.equals(ROW_2, result.getRow())); + + results = scanner.next(2); + assertNotNull(results); + assertEquals(2, results.length); + assertTrue(Bytes.equals(ROW_3, results[0].getRow())); + assertTrue(Bytes.equals(ROW_4, results[1].getRow())); + + results = scanner.next(1); + assertTrue(results == null || results.length == 0); + scanner.close(); + + scanner = table.getScanner(FAMILYA); + results = scanner.next(4); + assertNotNull(results); + assertEquals(4, results.length); + assertTrue(Bytes.equals(ROW_1, results[0].getRow())); + assertTrue(Bytes.equals(ROW_2, results[1].getRow())); + assertTrue(Bytes.equals(ROW_3, results[2].getRow())); + assertTrue(Bytes.equals(ROW_4, results[3].getRow())); + + scanner.close(); + + scanner = table.getScanner(FAMILYA,QUALIFIER_1); + results = scanner.next(4); + assertNotNull(results); + assertEquals(4, results.length); + assertTrue(Bytes.equals(ROW_1, results[0].getRow())); + assertTrue(Bytes.equals(ROW_2, results[1].getRow())); + assertTrue(Bytes.equals(ROW_3, results[2].getRow())); + assertTrue(Bytes.equals(ROW_4, results[3].getRow())); + scanner.close(); + } + + } + + @Test + public void testCheckAndDelete() throws Exception { + testCheckAndDelete(thriftConnection, "testCheckAndDeleteTable"); + testCheckAndDelete(thriftHttpConnection, "testCheckAndDeleteHttpTable"); + } + + + public void testCheckAndDelete(Connection connection, String tableName) throws IOException { + createTable(thriftAdmin, tableName); + try (Table table = connection.getTable(TableName.valueOf(tableName))){ + Get get = new Get(ROW_1); + Result result = table.get(get); + byte[] value1 = result.getValue(FAMILYA, QUALIFIER_1); + byte[] value2 = result.getValue(FAMILYB, QUALIFIER_2); + assertNotNull(value1); + assertTrue(Bytes.equals(VALUE_1, value1)); + assertNull(value2); + assertTrue(table.exists(get)); + 
assertEquals(1, table.existsAll(Collections.singletonList(get)).length); + Delete delete = new Delete(ROW_1); + + table.checkAndMutate(ROW_1, FAMILYA).qualifier(QUALIFIER_1) + .ifEquals(VALUE_1).thenDelete(delete); + assertFalse(table.exists(get)); + + Put put = new Put(ROW_1); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + table.put(put); + + assertTrue(table.checkAndMutate(ROW_1, FAMILYA).qualifier(QUALIFIER_1) + .ifEquals(VALUE_1).thenPut(put)); + assertFalse(table.checkAndMutate(ROW_1, FAMILYA).qualifier(QUALIFIER_1) + .ifEquals(VALUE_2).thenPut(put)); + } + + } + + @Test + public void testIteratorScaner() throws Exception { + testIteratorScanner(thriftConnection, "testIteratorScanerTable"); + testIteratorScanner(thriftHttpConnection, "testIteratorScanerHttpTable"); + } + + public void testIteratorScanner(Connection connection, String tableName) throws IOException { + createTable(thriftAdmin, tableName); + try (Table table = connection.getTable(TableName.valueOf(tableName))){ + List puts = new ArrayList<>(4); + Put put = new Put(ROW_1); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + puts.add(put); + put = new Put(ROW_2); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + puts.add(put); + put = new Put(ROW_3); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + puts.add(put); + put = new Put(ROW_4); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + puts.add(put); + table.put(puts); + Scan scan = new Scan(); + scan.setCaching(1); + ResultScanner scanner = table.getScanner(scan); + Iterator iterator = scanner.iterator(); + assertTrue(iterator.hasNext()); + int counter = 0; + while (iterator.hasNext()) { + iterator.next(); + counter++; + } + assertEquals(4, counter); + } + + } + + @Test + public void testReverseScan() throws Exception { + testReverseScan(thriftConnection, "testReverseScanTable"); + testReverseScan(thriftHttpConnection, "testReverseScanHttpTable"); + } + + public void testReverseScan(Connection connection, String tableName) throws IOException { + 
createTable(thriftAdmin, tableName); + try (Table table = connection.getTable(TableName.valueOf(tableName))){ + List puts = new ArrayList<>(4); + Put put = new Put(ROW_1); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + puts.add(put); + put = new Put(ROW_2); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + puts.add(put); + put = new Put(ROW_3); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + puts.add(put); + put = new Put(ROW_4); + put.addColumn(FAMILYA, QUALIFIER_1, VALUE_1); + puts.add(put); + table.put(puts); + Scan scan = new Scan(); + scan.setReversed(true); + scan.setCaching(1); + ResultScanner scanner = table.getScanner(scan); + Iterator iterator = scanner.iterator(); + assertTrue(iterator.hasNext()); + int counter = 0; + Result lastResult = null; + while (iterator.hasNext()) { + Result current = iterator.next(); + if (lastResult != null) { + assertTrue(Bytes.compareTo(lastResult.getRow(), current.getRow()) > 0); + } + lastResult = current; + counter++; + } + assertEquals(4, counter); + } + + } + + + @Test + public void testScanWithFilters() throws Exception { + testScanWithFilters(thriftConnection, "testScanWithFiltersTable"); + testScanWithFilters(thriftHttpConnection, "testScanWithFiltersHttpTable"); + } + + private void testScanWithFilters(Connection connection, String tableName) throws IOException { + createTable(thriftAdmin, tableName); + try (Table table = connection.getTable(TableName.valueOf(tableName))){ + FilterList filterList = new FilterList(); + PrefixFilter prefixFilter = new PrefixFilter(Bytes.toBytes("testrow")); + ColumnValueFilter columnValueFilter = new ColumnValueFilter(FAMILYA, QUALIFIER_1, + CompareOperator.EQUAL, VALUE_1); + filterList.addFilter(prefixFilter); + filterList.addFilter(columnValueFilter); + Scan scan = new Scan(); + scan.setMaxVersions(2); scan.setFilter(filterList); + ResultScanner scanner = table.getScanner(scan); + Iterator iterator = scanner.iterator(); + assertTrue(iterator.hasNext()); + int counter = 0; + while (iterator.hasNext()) {
Result result = iterator.next(); + counter += result.size(); + } + assertEquals(2, counter); + } + } + + + private TableDescriptor createTable(Admin admin, String tableName) throws IOException { + TableDescriptorBuilder builder = TableDescriptorBuilder + .newBuilder(TableName.valueOf(tableName)); + ColumnFamilyDescriptorBuilder familyABuilder = ColumnFamilyDescriptorBuilder + .newBuilder(FAMILYA); + familyABuilder.setMaxVersions(3); + ColumnFamilyDescriptorBuilder familyBBuilder = ColumnFamilyDescriptorBuilder + .newBuilder(FAMILYB); + familyBBuilder.setMaxVersions(3); + ColumnFamilyDescriptorBuilder familyCBuilder = ColumnFamilyDescriptorBuilder + .newBuilder(FAMILYC); + familyCBuilder.setMaxVersions(3); + builder.setColumnFamily(familyABuilder.build()); + builder.setColumnFamily(familyBBuilder.build()); + builder.setColumnFamily(familyCBuilder.build()); + TableDescriptor tableDescriptor = builder.build(); + admin.createTable(tableDescriptor); + try (Table table = TEST_UTIL.getConnection().getTable(TableName.valueOf(tableName))) { + Put put = new Put(ROW_1); + put.addColumn(FAMILYA, QUALIFIER_1, TS_2, VALUE_1); + table.put(put); + put = new Put(ROW_2); + put.addColumn(FAMILYA, QUALIFIER_1, TS_1, VALUE_1); + put.addColumn(FAMILYA, QUALIFIER_1, TS_2, VALUE_2); + put.addColumn(FAMILYB, QUALIFIER_2, TS_2, VALUE_2); + table.put(put); + + } + return tableDescriptor; + + } + + private void testThriftAdmin(Connection connection, String namespace, String table) + throws Exception { + try (Admin admin = connection.getAdmin()){ + //create name space + NamespaceDescriptor namespaceDescriptor = NamespaceDescriptor.create(namespace).build(); + namespaceDescriptor.setConfiguration("key1", "value1"); + namespaceDescriptor.setConfiguration("key2", "value2"); + admin.createNamespace(namespaceDescriptor); + //list namespace + NamespaceDescriptor[] namespaceDescriptors = admin.listNamespaceDescriptors(); + boolean found = false; + for (NamespaceDescriptor nd : namespaceDescriptors) { 
+ if (nd.getName().equals(namespace)) { + found = true; + break; + } + } + assertTrue(found); + //modify namesapce + namespaceDescriptor.setConfiguration("kye3", "value3"); + admin.modifyNamespace(namespaceDescriptor); + //get namespace + NamespaceDescriptor namespaceDescriptorReturned = admin.getNamespaceDescriptor(namespace); + assertTrue(namespaceDescriptorReturned.getConfiguration().size() == 3); + //create table + TableDescriptor tableDescriptor = createTable(admin, table); + //modify table + TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor); + builder.setDurability(Durability.ASYNC_WAL); + admin.modifyTable(builder.build()); + //modify column family + ColumnFamilyDescriptor familyA = tableDescriptor.getColumnFamily(FAMILYA); + ColumnFamilyDescriptorBuilder familyABuilder = ColumnFamilyDescriptorBuilder + .newBuilder(familyA); + familyABuilder.setInMemory(true); + admin.modifyColumnFamily(tableDescriptor.getTableName(), familyABuilder.build()); + //add column family + ColumnFamilyDescriptorBuilder familyDBuilder = ColumnFamilyDescriptorBuilder + .newBuilder(FAMILYD); + familyDBuilder.setDataBlockEncoding(DataBlockEncoding.PREFIX); + admin.addColumnFamily(tableDescriptor.getTableName(), familyDBuilder.build()); + //get table descriptor + TableDescriptor tableDescriptorReturned = admin.getDescriptor(tableDescriptor.getTableName()); + assertTrue(tableDescriptorReturned.getColumnFamilies().length == 4); + assertTrue(tableDescriptorReturned.getDurability() == Durability.ASYNC_WAL); + ColumnFamilyDescriptor columnFamilyADescriptor1Returned = tableDescriptorReturned + .getColumnFamily(FAMILYA); + assertTrue(columnFamilyADescriptor1Returned.isInMemory() == true); + //delete column family + admin.deleteColumnFamily(tableDescriptor.getTableName(), FAMILYA); + tableDescriptorReturned = admin.getDescriptor(tableDescriptor.getTableName()); + assertTrue(tableDescriptorReturned.getColumnFamilies().length == 3); + //disable table + 
admin.disableTable(tableDescriptor.getTableName()); + assertTrue(admin.isTableDisabled(tableDescriptor.getTableName())); + //enable table + admin.enableTable(tableDescriptor.getTableName()); + assertTrue(admin.isTableEnabled(tableDescriptor.getTableName())); + assertTrue(admin.isTableAvailable(tableDescriptor.getTableName())); + //truncate table + admin.disableTable(tableDescriptor.getTableName()); + admin.truncateTable(tableDescriptor.getTableName(), true); + assertTrue(admin.isTableAvailable(tableDescriptor.getTableName())); + //delete table + admin.disableTable(tableDescriptor.getTableName()); + admin.deleteTable(tableDescriptor.getTableName()); + assertFalse(admin.tableExists(tableDescriptor.getTableName())); + //delete namespace + admin.deleteNamespace(namespace); + namespaceDescriptors = admin.listNamespaceDescriptors(); + // should have 2 namespace, default and hbase + found = false; + for (NamespaceDescriptor nd : namespaceDescriptors) { + if (nd.getName().equals(namespace)) { + found = true; + break; + } + } + assertTrue(found == false); + } + } +}