2008-06-17 19:58:05 -04:00
|
|
|
# HBase ruby classes.
|
|
|
|
# Has wrapper classes for org.apache.hadoop.hbase.client.HBaseAdmin
|
|
|
|
# and for org.apache.hadoop.hbase.client.HTable. Classes take
|
|
|
|
# Formatters on construction and outputs any results using
|
|
|
|
# Formatter methods. These classes are only really for use by
|
|
|
|
# the hirb.rb HBase Shell script; they don't make much sense elsewhere.
|
|
|
|
# For example, the exists method on Admin class prints to the formatter
|
|
|
|
# whether the table exists and returns nil regardless.
|
|
|
|
include Java
|
2008-09-22 17:53:38 -04:00
|
|
|
include_class('java.lang.Integer') {|package,name| "J#{name}" }
|
2009-01-29 11:51:26 -05:00
|
|
|
include_class('java.lang.Long') {|package,name| "J#{name}" }
|
2008-09-22 17:53:38 -04:00
|
|
|
include_class('java.lang.Boolean') {|package,name| "J#{name}" }
|
|
|
|
|
2008-06-17 19:58:05 -04:00
|
|
|
import org.apache.hadoop.hbase.client.HBaseAdmin
|
|
|
|
import org.apache.hadoop.hbase.client.HTable
|
|
|
|
import org.apache.hadoop.hbase.HConstants
|
|
|
|
import org.apache.hadoop.hbase.io.BatchUpdate
|
|
|
|
import org.apache.hadoop.hbase.io.RowResult
|
|
|
|
import org.apache.hadoop.hbase.io.Cell
|
2009-03-03 16:02:31 -05:00
|
|
|
import org.apache.hadoop.hbase.io.hfile.Compression
|
2008-06-17 19:58:05 -04:00
|
|
|
import org.apache.hadoop.hbase.HBaseConfiguration
|
|
|
|
import org.apache.hadoop.hbase.HColumnDescriptor
|
|
|
|
import org.apache.hadoop.hbase.HTableDescriptor
|
2008-06-18 18:24:34 -04:00
|
|
|
import org.apache.hadoop.hbase.util.Bytes
|
|
|
|
import org.apache.hadoop.hbase.util.Writables
|
2008-12-23 18:10:25 -05:00
|
|
|
import org.apache.hadoop.hbase.HRegionInfo
|
2008-06-17 19:58:05 -04:00
|
|
|
|
2008-06-12 02:00:35 -04:00
|
|
|
module HBase
|
2008-06-17 19:58:05 -04:00
|
|
|
# String constants used as keys in the argument Hashes that shell
# commands pass into the Admin and Table wrappers below.
COLUMN = "COLUMN"
COLUMNS = "COLUMNS"
TIMESTAMP = "TIMESTAMP"
NAME = HConstants::NAME
VERSIONS = HConstants::VERSIONS
IN_MEMORY = HConstants::IN_MEMORY
STOPROW = "STOPROW"
STARTROW = "STARTROW"
ENDROW = STOPROW
LIMIT = "LIMIT"
METHOD = "METHOD"
MAXLENGTH = "MAXLENGTH"
# BUGFIX: keys for the "table_att" branch of Admin#alter.  That branch
# referenced these as bare constants, but they were never defined in this
# module, so altering table attributes raised NameError at runtime.
MAX_FILESIZE = "MAX_FILESIZE"
READONLY = "READONLY"
MEMCACHE_FLUSHSIZE = "MEMCACHE_FLUSHSIZE"
|
2008-06-17 19:58:05 -04:00
|
|
|
|
|
|
|
# Wrapper for org.apache.hadoop.hbase.client.HBaseAdmin
|
2008-06-12 02:00:35 -04:00
|
|
|
class Admin
|
|
|
|
# Wrap an HBaseAdmin made from the passed configuration.
# configuration: an HBaseConfiguration instance
# formatter: Formatter used to emit all command output (see Formatter.rb)
def initialize(configuration, formatter)
  @admin = HBaseAdmin.new(configuration)
  @formatter = formatter
end
|
|
|
|
|
|
|
|
# Print the name of every user table to the formatter.
def list
  start = Time.now
  @formatter.header()
  @admin.listTables().each do |descriptor|
    @formatter.row([descriptor.getNameAsString()])
  end
  @formatter.footer(start)
end
|
|
|
|
|
2008-06-13 01:50:00 -04:00
|
|
|
# Print the schema (HTableDescriptor#to_s) and enabled-state of the
# named table; also matches the catalog tables .META. and -ROOT-.
# Raises ArgumentError when no table of that name exists.
def describe(tableName)
  start = Time.now
  @formatter.header(["DESCRIPTION", "ENABLED"], [64])
  # Catalog tables are not returned by listTables; add them explicitly.
  candidates = @admin.listTables().to_a
  candidates.push(HTableDescriptor::META_TABLEDESC, HTableDescriptor::ROOT_TABLEDESC)
  matched = false
  candidates.each do |descriptor|
    next unless descriptor.getNameAsString() == tableName
    @formatter.row([descriptor.to_s, "%s" % [@admin.isTableEnabled(tableName)]], true, [64])
    matched = true
  end
  unless matched
    raise ArgumentError.new("Failed to find table named " + tableName)
  end
  @formatter.footer(start)
end
|
|
|
|
|
2008-06-12 02:00:35 -04:00
|
|
|
# Print whether the named table exists; returns nil regardless.
def exists(tableName)
  start = Time.now
  @formatter.header()
  @formatter.row([@admin.tableExists(tableName).to_s])
  @formatter.footer(start)
end
|
|
|
|
|
2009-01-05 20:12:36 -05:00
|
|
|
# Ask the cluster to flush the named table or single region.
def flush(tableNameOrRegionName)
  start = Time.now
  @formatter.header()
  @admin.flush(tableNameOrRegionName)
  @formatter.footer(start)
end
|
|
|
|
|
|
|
|
# Ask the cluster to compact the named table or single region.
def compact(tableNameOrRegionName)
  start = Time.now
  @formatter.header()
  @admin.compact(tableNameOrRegionName)
  @formatter.footer(start)
end
|
|
|
|
|
|
|
|
# Ask the cluster to major-compact the named table or single region.
def major_compact(tableNameOrRegionName)
  start = Time.now
  @formatter.header()
  @admin.majorCompact(tableNameOrRegionName)
  @formatter.footer(start)
end
|
|
|
|
|
|
|
|
# Ask the cluster to split the named table or single region.
def split(tableNameOrRegionName)
  start = Time.now
  @formatter.header()
  @admin.split(tableNameOrRegionName)
  @formatter.footer(start)
end
|
|
|
|
|
2008-06-12 02:00:35 -04:00
|
|
|
# Enable the named table.
# TODO: Need an isEnabled method so we can report the resulting state.
def enable(tableName)
  start = Time.now
  @admin.enableTable(tableName)
  @formatter.header()
  @formatter.footer(start)
end
|
|
|
|
|
|
|
|
# Disable the named table.
# TODO: Need an isDisabled method so we can report the resulting state.
def disable(tableName)
  start = Time.now
  @admin.disableTable(tableName)
  @formatter.header()
  @formatter.footer(start)
end
|
|
|
|
|
2008-12-23 18:10:25 -05:00
|
|
|
# Bring a region back online: clears the offline flag on its
# HRegionInfo in .META. (online takes true to mean "set offline").
def enable_region(regionName)
  online(regionName, false)
end
|
|
|
|
|
|
|
|
# Take a region offline: sets the offline flag on its HRegionInfo
# in .META. (online takes true to mean "set offline").
def disable_region(regionName)
  online(regionName, true)
end
|
|
|
|
|
|
|
|
# Rewrite the offline flag on a region's HRegionInfo in the .META. table.
# regionName: the region's name as stored in .META. (String)
# onOrOff: true sets the region OFFLINE, false sets it online -- note the
# inversion relative to this method's name; see enable_region/disable_region.
def online(regionName, onOrOff)
  now = Time.now
  meta = HTable.new(HConstants::META_TABLE_NAME)
  bytes = Bytes.toBytes(regionName)
  # Pull the serialized HRegionInfo for this region out of .META.
  hriBytes = meta.get(bytes, HConstants::COL_REGIONINFO).getValue()
  hri = Writables.getWritable(hriBytes, HRegionInfo.new());
  hri.setOffline(onOrOff)
  # Echo the modified HRegionInfo so the operator can see what was written.
  p hri
  # Write the updated HRegionInfo back into the same .META. row.
  bu = BatchUpdate.new(bytes)
  bu.put(HConstants::COL_REGIONINFO, Writables.getBytes(hri))
  meta.commit(bu);
  @formatter.header()
  @formatter.footer(now)
end
|
|
|
|
|
2008-06-12 02:00:35 -04:00
|
|
|
# Delete the named table.  The table must be disabled first;
# raises IOError when it is still enabled.
def drop(tableName)
  start = Time.now
  @formatter.header()
  if @admin.isTableEnabled(tableName)
    raise IOError.new("Table " + tableName + " is enabled. Disable it first")
  end
  @admin.deleteTable(tableName)
  @formatter.footer(start)
end
|
|
|
|
|
2008-10-29 14:01:35 -04:00
|
|
|
# Truncate the named table: disable it, drop it, then re-create it
# with the same schema.  Progress is echoed to stdout via puts.
def truncate(tableName)
  now = Time.now
  @formatter.header()
  # Capture the table's schema before it goes away so we can re-create it.
  hTable = HTable.new(tableName)
  tableDescription = hTable.getTableDescriptor()
  puts 'Truncating ' + tableName + '; it may take a while'
  puts 'Disabling table...'
  disable(tableName)
  puts 'Dropping table...'
  drop(tableName)
  puts 'Creating table...'
  @admin.createTable(tableDescription)
  @formatter.footer(now)
end
|
|
|
|
|
2008-06-17 19:58:05 -04:00
|
|
|
# Pass tablename and an array of Hashes
|
2008-06-12 02:00:35 -04:00
|
|
|
# Create a table.
# tableName: name of the new table (String)
# args: Array whose elements are each either a column family name
#       (String; a trailing ':' is added if missing) or a Hash of
#       column family attributes understood by hcd (NAME required).
def create(tableName, args)
  now = Time.now
  # Pass table name and an array of Hashes.  Later, test the last
  # array to see if its table options rather than column family spec.
  raise TypeError.new("Table name must be of type String") \
    unless tableName.instance_of? String
  # For now presume all the rest of the args are column family
  # hash specifications. TODO: Add table options handling.
  htd = HTableDescriptor.new(tableName)
  for arg in args
    if arg.instance_of? String
      # Bare string: just a family name.
      htd.addFamily(HColumnDescriptor.new(makeColumnName(arg)))
    else
      raise TypeError.new(arg.class.to_s + " of " + arg.to_s + " is not of Hash type") \
        unless arg.instance_of? Hash
      htd.addFamily(hcd(arg))
    end
  end
  @admin.createTable(htd)
  @formatter.header()
  @formatter.footer(now)
end
|
2008-06-13 18:42:11 -04:00
|
|
|
|
|
|
|
# Alter a table.
# tableName: table to alter (String)
# args: Hash; the METHOD key selects the operation:
#   "delete"    -> delete the column family named by args[NAME]
#   "table_att" -> set table attributes MAX_FILESIZE, READONLY and
#                  MEMCACHE_FLUSHSIZE; any attribute not present in
#                  args is reset to its HTableDescriptor default
#   otherwise   -> treat args as a column family spec (see hcd); the
#                  family is modified if it exists, added if not
def alter(tableName, args)
  now = Time.now
  raise TypeError.new("Table name must be of type String") \
    unless tableName.instance_of? String
  htd = @admin.getTableDescriptor(tableName.to_java_bytes)
  method = args.delete(METHOD)
  if method == "delete"
    @admin.deleteColumn(tableName, makeColumnName(args[NAME]))
  elsif method == "table_att"
    # BUGFIX: these keys were referenced as bare constants (MAX_FILESIZE,
    # READONLY, MEMCACHE_FLUSHSIZE) that are not defined in this module,
    # so the "table_att" branch raised NameError; use the literal keys.
    args["MAX_FILESIZE"] ?
      htd.setMaxFileSize(JLong.valueOf(args["MAX_FILESIZE"])) :
      htd.setMaxFileSize(HTableDescriptor::DEFAULT_MAX_FILESIZE)
    args["READONLY"] ?
      htd.setReadOnly(JBoolean.valueOf(args["READONLY"])) :
      htd.setReadOnly(HTableDescriptor::DEFAULT_READONLY)
    args["MEMCACHE_FLUSHSIZE"] ?
      htd.setMemcacheFlushSize(JLong.valueOf(args["MEMCACHE_FLUSHSIZE"])) :
      htd.setMemcacheFlushSize(HTableDescriptor::DEFAULT_MEMCACHE_FLUSH_SIZE)
    @admin.modifyTable(tableName.to_java_bytes, htd)
  else
    descriptor = hcd(args)
    if (htd.hasFamily(descriptor.getNameAsString().to_java_bytes))
      @admin.modifyColumn(tableName, descriptor.getNameAsString(),
        descriptor);
    else
      @admin.addColumn(tableName, descriptor);
    end
  end
  @formatter.header()
  @formatter.footer(now)
end
|
|
|
|
|
2008-12-23 18:10:25 -05:00
|
|
|
# Close the named region, optionally on a specific region server.
# server: a server address String, or nil to let the master pick.
def close_region(regionName, server)
  start = Time.now
  servers = server ? [server].to_java : nil
  @admin.closeRegion(regionName, servers)
  @formatter.header()
  @formatter.footer(start)
end
|
|
|
|
|
2008-06-17 19:58:05 -04:00
|
|
|
# Make a legal column name of the passed String
|
|
|
|
# Check string ends in colon. If not, add it.
|
|
|
|
# Make a legal column name of the passed String: a column family name
# must end in ':'.  If the argument contains no colon, append one;
# if it already has a colon (anywhere), return it as-is and let any
# misplaced colon surface as an error out of addFamily.
# BUGFIX: no longer mutates the caller's String in place (the original
# appended with <<); all callers use the return value.
def makeColumnName(arg)
  return arg if arg.index(':')
  arg + ':'
end
|
|
|
|
|
2009-02-03 05:22:06 -05:00
|
|
|
# Shut down the entire HBase cluster.  Produces no formatter output.
def shutdown()
  @admin.shutdown()
end
|
|
|
|
|
2008-06-13 18:42:11 -04:00
|
|
|
# Return a new HColumnDescriptor made of passed args.
# arg: Hash of column family attributes; NAME is required, every other
# attribute falls back to the matching HColumnDescriptor DEFAULT_*.
# TODO: This is brittle code.
# Here is current HCD constructor:
# public HColumnDescriptor(final byte [] familyName, final int maxVersions,
#   final String compression, final boolean inMemory,
#   final boolean blockCacheEnabled, final int blocksize,
#   final int maxValueLength,
#   final int timeToLive, final boolean bloomFilter) {
def hcd(arg)
  name = arg[NAME]
  # BUGFIX: the message was built as "..." + arg, concatenating a Hash
  # onto a String, which raised TypeError instead of the intended
  # ArgumentError; stringify the Hash explicitly.
  raise ArgumentError.new("Column family " + arg.to_s + " must have a name") \
    unless name
  name = makeColumnName(name)
  # TODO: What encoding are Strings in jruby?
  return HColumnDescriptor.new(name.to_java_bytes,
    # JRuby uses longs for ints. Need to convert. Also constants are String
    arg[VERSIONS]? JInteger.new(arg[VERSIONS]): HColumnDescriptor::DEFAULT_VERSIONS,
    arg[HColumnDescriptor::COMPRESSION]? arg[HColumnDescriptor::COMPRESSION]: HColumnDescriptor::DEFAULT_COMPRESSION,
    arg[IN_MEMORY]? JBoolean.valueOf(arg[IN_MEMORY]): HColumnDescriptor::DEFAULT_IN_MEMORY,
    arg[HColumnDescriptor::BLOCKCACHE]? JBoolean.valueOf(arg[HColumnDescriptor::BLOCKCACHE]): HColumnDescriptor::DEFAULT_BLOCKCACHE,
    arg[HColumnDescriptor::BLOCKSIZE]? JInteger.valueOf(arg[HColumnDescriptor::BLOCKSIZE]): HColumnDescriptor::DEFAULT_BLOCKSIZE,
    arg[HColumnDescriptor::LENGTH]? JInteger.new(arg[HColumnDescriptor::LENGTH]): HColumnDescriptor::DEFAULT_LENGTH,
    arg[HColumnDescriptor::TTL]? JInteger.new(arg[HColumnDescriptor::TTL]): HColumnDescriptor::DEFAULT_TTL,
    arg[HColumnDescriptor::BLOOMFILTER]? JBoolean.valueOf(arg[HColumnDescriptor::BLOOMFILTER]): HColumnDescriptor::DEFAULT_BLOOMFILTER)
end
|
2008-06-12 02:00:35 -04:00
|
|
|
end
|
|
|
|
|
2008-06-17 19:58:05 -04:00
|
|
|
# Wrapper for org.apache.hadoop.hbase.client.HTable
|
2008-06-12 02:00:35 -04:00
|
|
|
class Table
|
2008-06-17 19:58:05 -04:00
|
|
|
# Wrap an HTable for the named table.
# configuration: an HBaseConfiguration instance
# tableName: table to open (String)
# formatter: Formatter used to emit all command output
def initialize(configuration, tableName, formatter)
  @table = HTable.new(configuration, tableName)
  @formatter = formatter
end
|
|
|
|
|
|
|
|
# Delete a cell
|
2008-07-09 17:39:45 -04:00
|
|
|
# Delete the cell at row/column, optionally at a specific timestamp
# (defaults to the most recent cell).
def delete(row, column, timestamp = HConstants::LATEST_TIMESTAMP)
  start = Time.now
  update = BatchUpdate.new(row, timestamp)
  update.delete(column)
  @table.commit(update)
  @formatter.header()
  @formatter.footer(start)
end
|
|
|
|
|
|
|
|
# Delete an entire row, or all versions of one column in the row when
# column is given, at/up to the passed timestamp.
def deleteall(row, column = nil, timestamp = HConstants::LATEST_TIMESTAMP)
  start = Time.now
  @table.deleteAll(row, column, timestamp)
  @formatter.header()
  @formatter.footer(start)
end
|
|
|
|
|
2008-06-18 18:24:34 -04:00
|
|
|
# Return an Array of all this table's column family names, each with
# its trailing ':' appended.
def getAllColumns
  descriptor = @table.getTableDescriptor()
  names = []
  descriptor.getFamilies().each do |family|
    names << family.getNameAsString() + ':'
  end
  names
end
|
|
|
|
|
2008-10-08 19:45:30 -04:00
|
|
|
# Scan the table, printing a ROW / COLUMN+CELL line for every cell.
# args: optional Hash of scanner specifications:
#   "COLUMNS"   - String or Array of column names (default: all families)
#   "STARTROW"  - first row to scan (default: start of table)
#   "STOPROW"   - row to stop before (when given, "FILTER" is not used)
#   "TIMESTAMP" - only cells at/before this timestamp
#   "FILTER"    - row filter passed through to getScanner
#   "LIMIT"     - maximum number of rows to print (-1 = unlimited)
#   "MAXLENGTH" - truncate each printed cell value to this many chars
def scan(args = {})
  now = Time.now
  limit = -1
  maxlength = -1
  if args != nil and args.length > 0
    limit = args["LIMIT"] || -1
    maxlength = args["MAXLENGTH"] || -1
    filter = args["FILTER"] || nil
    startrow = args["STARTROW"] || ""
    stoprow = args["STOPROW"] || nil
    timestamp = args["TIMESTAMP"] || HConstants::LATEST_TIMESTAMP
    columns = args["COLUMNS"] || getAllColumns()
    if columns.class == String
      columns = [columns]
    elsif columns.class != Array
      raise ArgumentError.new("COLUMNS must be specified as a String or an Array")
    end
    cs = columns.to_java(java.lang.String)
    if stoprow
      s = @table.getScanner(cs, startrow, stoprow, timestamp)
    else
      s = @table.getScanner(cs, startrow, timestamp, filter)
    end
  else
    columns = getAllColumns()
    s = @table.getScanner(columns.to_java(java.lang.String))
  end
  count = 0
  @formatter.header(["ROW", "COLUMN+CELL"])
  begin
    i = s.iterator()
    while i.hasNext()
      r = i.next()
      row = String.from_java_bytes r.getRow()
      for k, v in r
        column = String.from_java_bytes k
        cell = toString(column, v, maxlength)
        @formatter.row([row, "column=%s, %s" % [column, cell]])
      end
      count += 1
      if limit != -1 and count >= limit
        break
      end
    end
  ensure
    # BUGFIX: the scanner was never closed, leaking server-side scanner
    # resources -- always, including on the early LIMIT break above and
    # on exceptions raised mid-scan.
    s.close()
  end
  @formatter.footer(now)
end
|
|
|
|
|
|
|
|
# Put a cell value at row/column, optionally at an explicit timestamp
# (nil lets the server assign the current time).
def put(row, column, value, timestamp = nil)
  start = Time.now
  update = timestamp ? BatchUpdate.new(row, timestamp) : BatchUpdate.new(row)
  update.put(column, value.to_java_bytes)
  @table.commit(update)
  @formatter.header()
  @formatter.footer(start)
end
|
2008-06-18 18:24:34 -04:00
|
|
|
|
|
|
|
# True when this Table wraps one of the catalog tables (.META. or -ROOT-).
def isMetaTable()
  name = @table.getTableName()
  Bytes.equals(name, HConstants::META_TABLE_NAME) ||
    Bytes.equals(name, HConstants::ROOT_TABLE_NAME)
end
|
|
|
|
|
|
|
|
# Make a String of the passed cell.
|
|
|
|
# Intercept cells whose format we know such as the info:regioninfo in .META.
|
2009-02-26 12:55:27 -05:00
|
|
|
# Make a String of the passed cell.
# Intercept cells whose format we know such as the info:regioninfo in .META.
# column: column name (String) or nil; cell: a Cell; maxlength: truncate
# the rendered value to this many characters (-1 = no truncation; the
# special-cased .META. renderings are not truncated, as before).
def toString(column, cell, maxlength)
  if isMetaTable()
    if column == 'info:regioninfo'
      hri = Writables.getHRegionInfoOrNull(cell.getValue())
      # BUGFIX: getHRegionInfoOrNull may return nil for an undeserializable
      # value; render "nil" rather than raising NoMethodError.
      return "timestamp=%d, value=%s" % [cell.getTimestamp(),
        hri ? hri.toString() : "nil"]
    elsif column == 'info:serverstartcode'
      return "timestamp=%d, value=%s" % [cell.getTimestamp(), \
        Bytes.toLong(cell.getValue())]
    end
  end
  # BUGFIX: removed a stray no-op 'cell.toString()' statement (merge
  # leftover) that preceded this assignment.
  val = cell.toString()
  maxlength != -1 ? val[0, maxlength] : val
end
|
2008-06-17 19:58:05 -04:00
|
|
|
|
|
|
|
# Get from table
|
|
|
|
# Get from table and print the result (a Cell or a RowResult).
# row: row key (String)
# args: optional Hash -- COLUMN/COLUMNS (String or Array), TIMESTAMP,
#       VERSIONS (single-column gets only), MAXLENGTH (truncate each
#       printed value to this many characters)
def get(row, args = {})
  now = Time.now
  result = nil
  if args == nil or args.length == 0 or (args.length == 1 and args[MAXLENGTH] != nil)
    # No column spec: fetch the whole row at the latest timestamp.
    result = @table.getRow(row.to_java_bytes)
  else
    # Its a hash.
    columns = args[COLUMN]
    if columns == nil
      # Maybe they used the COLUMNS key
      columns = args[COLUMNS]
    end
    if columns == nil
      # May have passed TIMESTAMP and row only; wants all columns from ts.
      ts = args[TIMESTAMP]
      if not ts
        # BUGFIX: the message was built as "..." + args (+ a Class), which
        # raised TypeError instead of the intended ArgumentError.
        raise ArgumentError.new("Failed parse of " + args.to_s + ", " +
          args.class.to_s)
      end
      result = @table.getRow(row.to_java_bytes, ts)
    else
      # Columns are non-nil
      if columns.class == String
        # Single column
        result = @table.get(row, columns,
          args[TIMESTAMP]? args[TIMESTAMP]: HConstants::LATEST_TIMESTAMP,
          args[VERSIONS]? args[VERSIONS]: 1)
      elsif columns.class == Array
        result = @table.getRow(row, columns.to_java(:string),
          args[TIMESTAMP]? args[TIMESTAMP]: HConstants::LATEST_TIMESTAMP)
      else
        # BUGFIX: same String + Hash TypeError as above.
        raise ArgumentError.new("Failed parse column argument type " +
          args.to_s + ", " + args.class.to_s)
      end
    end
  end
  # Print out results.  Result can be Cell or RowResult.
  # BUGFIX: guard against an explicitly passed nil args (the whole-row
  # branch above accepts it, but args[MAXLENGTH] would raise on nil).
  maxlength = (args && args[MAXLENGTH]) || -1
  h = nil
  if result.instance_of? RowResult
    h = String.from_java_bytes result.getRow()
    @formatter.header(["COLUMN", "CELL"])
    if result
      for k, v in result
        column = String.from_java_bytes k
        @formatter.row([column, toString(column, v, maxlength)])
      end
    end
  else
    # Presume Cells
    @formatter.header()
    if result
      for c in result
        @formatter.row([toString(nil, c, maxlength)])
      end
    end
  end
  @formatter.footer(now)
end
|
2008-08-12 14:30:33 -04:00
|
|
|
|
|
|
|
# Count all rows in the table, printing running progress every
# 'interval' rows (default 1000); final count goes to the footer.
def count(interval = 1000)
  now = Time.now
  columns = getAllColumns()
  cs = columns.to_java(java.lang.String)
  s = @table.getScanner(cs)
  count = 0
  @formatter.header()
  begin
    i = s.iterator()
    while i.hasNext()
      r = i.next()
      count += 1
      if count % interval == 0
        @formatter.row(["Current count: " + count.to_s + ", row: " + \
          (String.from_java_bytes r.getRow())])
      end
    end
  ensure
    # BUGFIX: the scanner was never closed, leaking server-side scanner
    # resources; release it even if the scan raises.
    s.close()
  end
  @formatter.footer(now, count)
end
|
|
|
|
|
2008-06-17 19:58:05 -04:00
|
|
|
end
|
|
|
|
|
2008-06-18 18:24:34 -04:00
|
|
|
# Testing. To run this test, there needs to be an hbase cluster up and
|
|
|
|
# running. Then do: ${HBASE_HOME}/bin/hbase org.jruby.Main bin/HBase.rb
|
2008-06-17 19:58:05 -04:00
|
|
|
# Self-test harness; runs only when this file is executed directly.
# Requires a live HBase cluster.
# NOTE(review): several calls below pass scan specs as positional
# arguments (e.g. table.scan(['x:'], {LIMIT => 3})) but Table#scan now
# takes a single optional Hash -- these calls look stale and will raise;
# confirm against the current scan signature before relying on this test.
if $0 == __FILE__
  # Add this directory to LOAD_PATH; presumption is that Formatter module
  # sits beside this one.  Then load it up.
  $LOAD_PATH.unshift File.dirname($PROGRAM_NAME)
  require 'Formatter'
  # Make a console formatter
  formatter = Formatter::Console.new(STDOUT)
  # Now add in java and hbase classes
  configuration = HBaseConfiguration.new()
  admin = Admin.new(configuration, formatter)
  # Drop old table.  If it does not exist, get an exception.  Catch and
  # continue
  TESTTABLE = "HBase_rb_testtable"
  begin
    admin.disable(TESTTABLE)
    admin.drop(TESTTABLE)
  rescue org.apache.hadoop.hbase.TableNotFoundException
    # Just suppress not found exception
  end
  admin.create(TESTTABLE, [{NAME => 'x', VERSIONS => 5}])
  # Presume it exists.  If it doesn't, next items will fail.
  table = Table.new(configuration, TESTTABLE, formatter)
  for i in 1..10
    table.put('x%d' % i, 'x:%d' % i, 'x%d' % i)
  end
  table.get('x1', {COLUMN => 'x:1'})
  if formatter.rowCount() != 1
    raise IOError.new("Failed first put")
  end
  table.scan(['x:'])
  if formatter.rowCount() != 10
    raise IOError.new("Failed scan of expected 10 rows")
  end
  # Verify that limit works.
  table.scan(['x:'], {LIMIT => 3})
  if formatter.rowCount() != 3
    raise IOError.new("Failed scan of expected 3 rows")
  end
  # Should only be two rows if we start at 8 (Row x10 sorts beside x1).
  table.scan(['x:'], {STARTROW => 'x8', LIMIT => 3})
  if formatter.rowCount() != 2
    raise IOError.new("Failed scan of expected 2 rows")
  end
  # Scan between two rows
  table.scan(['x:'], {STARTROW => 'x5', ENDROW => 'x8'})
  if formatter.rowCount() != 3
    raise IOError.new("Failed endrow test")
  end
  # Verify that delete works
  table.delete('x1', 'x:1');
  table.scan(['x:1'])
  scan1 = formatter.rowCount()
  table.scan(['x:'])
  scan2 = formatter.rowCount()
  if scan1 != 0 or scan2 != 9
    raise IOError.new("Failed delete test")
  end
  # Verify that deleteall works
  table.put('x2', 'x:1', 'x:1')
  table.deleteall('x2')
  table.scan(['x:2'])
  scan1 = formatter.rowCount()
  table.scan(['x:'])
  scan2 = formatter.rowCount()
  if scan1 != 0 or scan2 != 8
    raise IOError.new("Failed deleteall test")
  end
  admin.disable(TESTTABLE)
  admin.drop(TESTTABLE)
end
|
|
|
|
end
|