HBASE-697 thrift idl needs update/edit to match new 0.2 API (and to fix bugs)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@686568 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2008-08-16 22:04:16 +00:00
parent 8e95cab93c
commit e30ed932bb
17 changed files with 3590 additions and 1036 deletions

CHANGES.txt

@@ -3,6 +3,9 @@ Hbase Change Log
Release 0.3.0 - Unreleased
INCOMPATIBLE CHANGES
+HBASE-697 Thrift idl needs update/edit to match new 0.2 API (and to fix bugs)
+(Tim Sell via Stack)
BUG FIXES
HBASE-805 Remove unnecessary getRow overloads in HRS (Jonathan Gray via
Jim Kellerman) (Fix whitespace diffs in HRegionServer)

Hbase.thrift

@@ -31,9 +31,10 @@
// used to generate the *.java files checked into the Hbase project.
// ----------------------------------------------------------------
-java_package org.apache.hadoop.hbase.thrift.generated
-cpp_namespace apache.hadoop.hbase.thrift
-ruby_namespace Apache.Hadoop.Hbase.Thrift
+namespace java org.apache.hadoop.hbase.thrift.generated
+namespace cpp apache.hadoop.hbase.thrift
+namespace rb Apache.Hadoop.Hbase.Thrift
// note: other language namespaces tbd...
//
@@ -50,6 +51,16 @@ typedef binary Text
typedef binary Bytes
typedef i32 ScannerID
/**
* TCell - Used to transport a cell value (byte[]) and the timestamp it was
* stored with together as a result for get and getRow methods. This promotes
* the timestamp of a cell to a first-class value, making it easy to take
* note of temporal data. Cell is used all the way from HStore up to HTable.
*/
struct TCell{
1:Bytes value,
2:i64 timestamp
}
/**
* An HColumnDescriptor contains information about a column family
@@ -70,11 +81,14 @@ struct ColumnDescriptor {
}
/**
-* A RegionDescriptor contains informationa about an HTable region.
-* Currently, this is just the startKey of the region.
+* A TRegionInfo contains information about an HTable region.
*/
-struct RegionDescriptor {
+struct TRegionInfo {
1:Text startKey,
+2:Text endKey,
+3:i64 id,
+4:Text name,
+5:byte version
}
/**
@@ -97,12 +111,11 @@ struct BatchMutation {
/**
-* A ScanEntry contains the row, column, and value information for a scanner's
-* current location.
+* Holds row name and then a map of columns to cells.
*/
-struct ScanEntry {
+struct TRowResult {
1:Text row,
-2:map<Text, Bytes> columns
+2:map<Text, TCell> columns
}
//
@@ -146,6 +159,27 @@ exception AlreadyExists {
//
service Hbase {
/**
* Brings a table on-line (enables it)
* @param tableName name of the table
*/
void enableTable(1:Bytes tableName)
throws (1:IOError io)
/**
* Disables a table (takes it off-line) If it is being served, the master
* will tell the servers to stop serving it.
* @param tableName name of the table
*/
void disableTable(1:Bytes tableName)
throws (1:IOError io)
/**
* @param tableName name of table to check
* @return true if table is on-line
*/
bool isTableEnabled(1:Bytes tableName)
throws (1:IOError io)
/**
* List all the userspace tables.
@@ -167,7 +201,7 @@ service Hbase {
* @param tableName table name
* @return list of region descriptors
*/
-list<RegionDescriptor> getTableRegions(1:Text tableName)
+list<TRegionInfo> getTableRegions(1:Text tableName)
throws (1:IOError io)
/**
@@ -194,7 +228,7 @@ service Hbase {
throws (1:IOError io, 2:NotFound nf)
/**
-* Get a single value for the specified table, row, and column at the
+* Get a single TCell for the specified table, row, and column at the
* latest timestamp.
*
* @param tableName name of table
@@ -202,7 +236,7 @@ service Hbase {
* @param column column name
* @return value for specified row/column
*/
-Bytes get(1:Text tableName, 2:Text row, 3:Text column)
+TCell get(1:Text tableName, 2:Text row, 3:Text column)
throws (1:IOError io, 2:NotFound nf)
/**
@@ -213,9 +247,9 @@ service Hbase {
* @param row row key
* @param column column name
* @param numVersions number of versions to retrieve
-* @return list of values for specified row/column
+* @return list of cells for specified row/column
*/
-list<Bytes> getVer(1:Text tableName, 2:Text row, 3:Text column, 4:i32 numVersions)
+list<TCell> getVer(1:Text tableName, 2:Text row, 3:Text column, 4:i32 numVersions)
throws (1:IOError io, 2:NotFound nf)
/**
@@ -228,9 +262,9 @@ service Hbase {
* @param column column name
* @param timestamp timestamp
* @param numVersions number of versions to retrieve
-* @return list of values for specified row/column
+* @return list of cells for specified row/column
*/
-list<Bytes> getVerTs(1:Text tableName, 2:Text row, 3:Text column, 4:i64 timestamp, 5:i32 numVersions)
+list<TCell> getVerTs(1:Text tableName, 2:Text row, 3:Text column, 4:i64 timestamp, 5:i32 numVersions)
throws (1:IOError io, 2:NotFound nf)
/**
@@ -239,9 +273,9 @@ service Hbase {
*
* @param tableName name of table
* @param row row key
-* @return Map of columns to values. Map is empty if row does not exist.
+* @return TRowResult containing the row and map of columns to TCells. Map is empty if row does not exist.
*/
-map<Text, Bytes> getRow(1:Text tableName, 2:Text row)
+TRowResult getRow(1:Text tableName, 2:Text row)
throws (1:IOError io)
/**
@@ -251,24 +285,11 @@ service Hbase {
* @param tableName of table
* @param row row key
* @param timestamp timestamp
-* @return Map of columns to values. Map is empty if row does not exist.
+* @return TRowResult containing the row and map of columns to TCells. Map is empty if row does not exist.
*/
-map<Text, Bytes> getRowTs(1:Text tableName, 2:Text row, 3:i64 timestamp)
+TRowResult getRowTs(1:Text tableName, 2:Text row, 3:i64 timestamp)
throws (1:IOError io)
/**
* Put a single value at the specified table, row, and column.
* To put muliple values in a single transaction, or to specify
* a non-default timestamp, use {@link #mutateRow} and/or
* {@link #mutateRowTs}
*
* @param tableName name of table
* @param row row key
* @param column column name
*/
void put(1:Text tableName, 2:Text row, 3:Text column, 4:Bytes value)
throws (1:IOError io, 2:IllegalArgument ia)
/**
* Apply a series of mutations (updates/deletes) to a row in a
* single transaction. If an exception is thrown, then the
@@ -456,11 +477,11 @@ service Hbase {
* a NotFound exception is returned.
*
* @param id id of a scanner returned by scannerOpen
-* @return a ScanEntry object representing the current row's values
+* @return a TRowResult containing the current row and a map of the columns to TCells.
* @throws IllegalArgument if ScannerID is invalid
* @throws NotFound when the scanner reaches the end
*/
-ScanEntry scannerGet(1:ScannerID id)
+TRowResult scannerGet(1:ScannerID id)
throws (1:IOError io, 2:IllegalArgument ia, 3:NotFound nf)
/**
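For orientation only (this is not part of the patch), a minimal Java client against the updated IDL might look like the sketch below. It assumes the generated Hbase.Client, the bundled com.facebook.thrift runtime, a ThriftServer listening on localhost:9090, placeholder table/row/column names, a Mutation constructor taking (isDelete, column, value), and a scannerClose call matching the scanner section of the IDL.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import com.facebook.thrift.protocol.TBinaryProtocol;
import com.facebook.thrift.protocol.TProtocol;
import com.facebook.thrift.transport.TSocket;
import com.facebook.thrift.transport.TTransport;

import org.apache.hadoop.hbase.thrift.generated.Hbase;
import org.apache.hadoop.hbase.thrift.generated.Mutation;
import org.apache.hadoop.hbase.thrift.generated.NotFound;
import org.apache.hadoop.hbase.thrift.generated.TCell;
import org.apache.hadoop.hbase.thrift.generated.TRowResult;

public class HbaseThriftClientSketch {
  public static void main(String[] args) throws Exception {
    // Host, port and names below are placeholders, not values from the patch.
    TTransport transport = new TSocket("localhost", 9090);
    transport.open();
    TProtocol protocol = new TBinaryProtocol(transport);
    Hbase.Client client = new Hbase.Client(protocol);

    byte[] table = "test_table".getBytes();
    byte[] row = "row1".getBytes();
    byte[] column = "family:qualifier".getBytes();

    // New in this revision: table state can be toggled and queried.
    if (!client.isTableEnabled(table)) {
      client.enableTable(table);
    }

    // Writes go through mutateRow; the old single-value put() is gone.
    List<Mutation> mutations = new ArrayList<Mutation>();
    mutations.add(new Mutation(false, column, "value1".getBytes()));
    client.mutateRow(table, row, mutations);

    // get() now returns a TCell (value plus timestamp) instead of raw bytes.
    TCell cell = client.get(table, row, column);
    System.out.println(new String(cell.value) + " @ " + cell.timestamp);

    // getRow() now returns a TRowResult instead of a bare map.
    TRowResult result = client.getRow(table, row);
    for (Map.Entry<byte[], TCell> e : result.columns.entrySet()) {
      System.out.println(new String(e.getKey()) + " => " + new String(e.getValue().value));
    }

    // scannerGet() also returns TRowResult and signals the end with NotFound.
    List<byte[]> columns = new ArrayList<byte[]>();
    columns.add("family:".getBytes());
    int scannerId = client.scannerOpen(table, "".getBytes(), columns);
    try {
      while (true) {
        System.out.println(new String(client.scannerGet(scannerId).row));
      }
    } catch (NotFound endOfScanner) {
      // the scanner reached the end of the table
    } finally {
      client.scannerClose(scannerId);
    }

    transport.close();
  }
}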

ThriftServer.java

@@ -20,18 +20,19 @@ package org.apache.hadoop.hbase.thrift;
import java.io.IOException;
import java.nio.charset.MalformedInputException;
-import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.TreeMap;
-import java.util.Map.Entry;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.client.HBaseAdmin;
@@ -48,8 +49,9 @@ import org.apache.hadoop.hbase.thrift.generated.IOError;
import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
import org.apache.hadoop.hbase.thrift.generated.Mutation;
import org.apache.hadoop.hbase.thrift.generated.NotFound;
-import org.apache.hadoop.hbase.thrift.generated.RegionDescriptor;
-import org.apache.hadoop.hbase.thrift.generated.ScanEntry;
+import org.apache.hadoop.hbase.thrift.generated.TRegionInfo;
+import org.apache.hadoop.hbase.thrift.generated.TCell;
+import org.apache.hadoop.hbase.thrift.generated.TRowResult;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
@@ -164,7 +166,34 @@ public class ThriftServer {
// used to generate the interface.
//
-public ArrayList<byte[]> getTableNames() throws IOError {
+public void enableTable(final byte[] tableName) throws IOError {
LOG.debug("enableTable");
try{
admin.enableTable(tableName);
} catch (IOException e) {
throw new IOError(e.getMessage());
}
}
public void disableTable(final byte[] tableName) throws IOError{
LOG.debug("disableTable");
try{
admin.disableTable(tableName);
} catch (IOException e) {
throw new IOError(e.getMessage());
}
}
public boolean isTableEnabled(final byte[] tableName) throws IOError {
LOG.debug("isTableEnabled");
try {
return HTable.isTableEnabled(tableName);
} catch (IOException e) {
throw new IOError(e.getMessage());
}
}
public List<byte[]> getTableNames() throws IOError {
LOG.debug("getTableNames");
try {
HTableDescriptor[] tables = this.admin.listTables();
@@ -178,25 +207,31 @@ public class ThriftServer {
}
}
-public ArrayList<RegionDescriptor> getTableRegions(byte[] tableName)
+public List<TRegionInfo> getTableRegions(byte[] tableName)
throws IOError {
+try{
LOG.debug("getTableRegions: " + new String(tableName));
-try {
HTable table = getTable(tableName);
-byte [][] startKeys = table.getStartKeys();
-ArrayList<RegionDescriptor> regions = new ArrayList<RegionDescriptor>();
-for (int i = 0; i < startKeys.length; i++) {
-RegionDescriptor region = new RegionDescriptor();
-region.startKey = startKeys[i];
+Map<HRegionInfo, HServerAddress> regionsInfo = table.getRegionsInfo();
+List<TRegionInfo> regions = new ArrayList<TRegionInfo>();
+for (HRegionInfo regionInfo : regionsInfo.keySet()){
+TRegionInfo region = new TRegionInfo();
+region.startKey = regionInfo.getStartKey();
+region.endKey = regionInfo.getEndKey();
+region.id = regionInfo.getRegionId();
+region.name = regionInfo.getRegionName();
+region.version = regionInfo.getVersion();
regions.add(region);
}
return regions;
-} catch (IOException e) {
+} catch (IOException e){
throw new IOError(e.getMessage());
}
}
-public byte[] get(byte[] tableName, byte[] row, byte[] column)
+public TCell get(byte[] tableName, byte[] row, byte[] column)
throws NotFound, IOError {
if (LOG.isDebugEnabled()) {
LOG.debug("get: table=" + new String(tableName) + ", row="
@@ -204,17 +239,17 @@ public class ThriftServer {
}
try {
HTable table = getTable(tableName);
-Cell value = table.get(getText(row), getText(column));
+Cell cell = table.get(getText(row), getText(column));
-if (value == null) {
+if (cell == null) {
throw new NotFound();
}
-return value.getValue();
+return ThriftUtilities.cellFromHBase(cell);
} catch (IOException e) {
throw new IOError(e.getMessage());
}
}
-public ArrayList<byte[]> getVer(byte[] tableName, byte[] row,
+public List<TCell> getVer(byte[] tableName, byte[] row,
byte[] column, int numVersions) throws IOError, NotFound {
if (LOG.isDebugEnabled()) {
LOG.debug("getVer: table=" + new String(tableName) + ", row="
@@ -223,14 +258,14 @@ public class ThriftServer {
}
try {
HTable table = getTable(tableName);
-Cell[] values =
+Cell[] cells =
table.get(getText(row), getText(column), numVersions);
-if (values == null) {
+if (cells == null) {
throw new NotFound();
}
-ArrayList<byte[]> list = new ArrayList<byte[]>();
+List<TCell> list = new ArrayList<TCell>();
-for (int i = 0; i < values.length; i++) {
+for (int i = 0; i < cells.length; i++) {
-list.add(values[i].getValue());
+list.add(ThriftUtilities.cellFromHBase(cells[i]));
}
return list;
} catch (IOException e) {
@@ -238,7 +273,7 @@ public class ThriftServer {
}
}
-public ArrayList<byte[]> getVerTs(byte[] tableName, byte[] row,
+public List<TCell> getVerTs(byte[] tableName, byte[] row,
byte[] column, long timestamp, int numVersions) throws IOError,
NotFound {
if (LOG.isDebugEnabled()) {
@@ -248,14 +283,14 @@ public class ThriftServer {
}
try {
HTable table = getTable(tableName);
-Cell[] values = table.get(getText(row),
+Cell[] cells = table.get(getText(row),
getText(column), timestamp, numVersions);
-if (values == null) {
+if (cells == null) {
throw new NotFound();
}
-ArrayList<byte[]> list = new ArrayList<byte[]>();
+List<TCell> list = new ArrayList<TCell>();
-for (int i = 0; i < values.length; i++) {
+for (int i = 0; i < cells.length; i++) {
-list.add(values[i].getValue());
+list.add(ThriftUtilities.cellFromHBase(cells[i]));
}
return list;
} catch (IOException e) {
@@ -263,12 +298,12 @@ public class ThriftServer {
}
}
-public AbstractMap<byte[], byte[]> getRow(byte[] tableName, byte[] row)
+public TRowResult getRow(byte[] tableName, byte[] row)
throws IOError {
return getRowTs(tableName, row, HConstants.LATEST_TIMESTAMP);
}
-public AbstractMap<byte[], byte[]> getRowTs(byte[] tableName, byte[] row,
+public TRowResult getRowTs(byte[] tableName, byte[] row,
long timestamp) throws IOError {
if (LOG.isDebugEnabled()) {
LOG.debug("getRowTs: table=" + new String(tableName) + ", row="
@@ -276,39 +311,12 @@ public class ThriftServer {
}
try {
HTable table = getTable(tableName);
-Map<byte [], Cell> values =
-table.getRow(getText(row), timestamp);
-// copy the map from type <Text, Cell> to <byte[], byte[]>
-TreeMap<byte[], byte[]> returnValues =
-new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR);
-for (Entry<byte [], Cell> e : values.entrySet()) {
-returnValues.put(e.getKey(), e.getValue().getValue());
-}
-return returnValues;
+return ThriftUtilities.rowResultFromHBase(table.getRow(getText(row), timestamp));
} catch (IOException e) {
throw new IOError(e.getMessage());
}
}
public void put(byte[] tableName, byte[] row, byte[] column, byte[] value)
throws IOError, IllegalArgument {
if (LOG.isDebugEnabled()) {
LOG.debug("put: table=" + new String(tableName) + ", row="
+ new String(row) + ", col=" + new String(column)
+ ", value.length=" + value.length);
}
try {
HTable table = getTable(tableName);
BatchUpdate batchUpdate = new BatchUpdate(getText(row));
batchUpdate.put(getText(column), value);
table.commit(batchUpdate);
} catch (IOException e) {
throw new IOError(e.getMessage());
} catch (IllegalArgumentException e) {
throw new IllegalArgument(e.getMessage());
}
}
public void deleteAll(byte[] tableName, byte[] row, byte[] column)
throws IOError {
deleteAllTs(tableName, row, column, HConstants.LATEST_TIMESTAMP);
@@ -348,7 +356,7 @@ public class ThriftServer {
}
public void createTable(byte[] tableName,
-ArrayList<ColumnDescriptor> columnFamilies) throws IOError,
+List<ColumnDescriptor> columnFamilies) throws IOError,
IllegalArgument, AlreadyExists {
if (LOG.isDebugEnabled()) {
LOG.debug("createTable: table=" + new String(tableName));
@@ -363,7 +371,7 @@ public class ThriftServer {
HColumnDescriptor colDesc = ThriftUtilities.colDescFromThrift(col);
desc.addFamily(colDesc);
if (LOG.isDebugEnabled()) {
-LOG.debug("createTable: col=" + colDesc.getName());
+LOG.debug("createTable: col=" + new String(colDesc.getName()));
}
}
admin.createTable(desc);
@@ -390,29 +398,27 @@ public class ThriftServer {
}
public void mutateRow(byte[] tableName, byte[] row,
-ArrayList<Mutation> mutations) throws IOError, IllegalArgument {
+List<Mutation> mutations) throws IOError, IllegalArgument {
mutateRowTs(tableName, row, mutations, HConstants.LATEST_TIMESTAMP);
}
public void mutateRowTs(byte[] tableName, byte[] row,
-ArrayList<Mutation> mutations, long timestamp) throws IOError, IllegalArgument {
+List<Mutation> mutations, long timestamp) throws IOError, IllegalArgument {
if (LOG.isDebugEnabled()) {
LOG.debug("mutateRowTs: table=" + new String(tableName) + ", row="
+ new String(row) + ", ts=" + timestamp + " mutations="
+ mutations.size());
for (Mutation m : mutations) {
if (m.isDelete) {
-LOG.debug("mutateRowTs: : delete - " + getText(m.column));
+LOG.debug("mutateRowTs: : delete - " + new String(getText(m.column)));
} else {
-LOG.debug("mutateRowTs: : put - " + getText(m.column) + " => "
-+ m.value);
+LOG.debug("mutateRowTs: : put - " + new String(getText(m.column)) + " => "
++ new String(m.value));
}
}
}
-Long lockid = null;
HTable table = null;
try {
table = getTable(tableName);
BatchUpdate batchUpdate = new BatchUpdate(getText(row), timestamp);
@@ -431,29 +437,30 @@ public class ThriftServer {
}
}
-public void mutateRows(byte[] tableName, ArrayList<BatchMutation> rowBatches)
+public void mutateRows(byte[] tableName, List<BatchMutation> rowBatches)
throws IOError, IllegalArgument, TException {
mutateRowsTs(tableName, rowBatches, HConstants.LATEST_TIMESTAMP);
}
-public void mutateRowsTs(byte[] tableName, ArrayList<BatchMutation> rowBatches, long timestamp)
+public void mutateRowsTs(byte[] tableName, List<BatchMutation> rowBatches, long timestamp)
throws IOError, IllegalArgument, TException {
-ArrayList<BatchUpdate> batchUpdates = new ArrayList<BatchUpdate>();
+List<BatchUpdate> batchUpdates = new ArrayList<BatchUpdate>();
+if (LOG.isDebugEnabled()) {
+LOG.debug("mutateRowsTs: table=" + new String(tableName) + ", rows="
++ rowBatches.size() + ", ts=" + timestamp);
+}
for (BatchMutation batch : rowBatches) {
byte[] row = batch.row;
-ArrayList<Mutation> mutations = batch.mutations;
+List<Mutation> mutations = batch.mutations;
if (LOG.isDebugEnabled()) {
-LOG.debug("mutateRowTs: table=" + new String(tableName) + ", row="
-+ new String(row) + ", ts=" + timestamp + " mutations="
-+ mutations.size());
+LOG.debug("mutateRowsTs: : row=" + new String(row) + " mutations=" + mutations.size());
for (Mutation m : mutations) {
if (m.isDelete) {
-LOG.debug("mutateRowTs: : delete - " + getText(m.column));
+LOG.debug("mutateRowsTs: : delete - " + new String(getText(m.column)));
} else {
-LOG.debug("mutateRowTs: : put - " + getText(m.column) + " => "
-+ m.value);
+LOG.debug("mutateRowsTs: : put - " + new String(getText(m.column)) + " => "
++ new String(m.value));
}
}
}
@@ -489,7 +496,7 @@ public class ThriftServer {
removeScanner(id);
}
-public ScanEntry scannerGet(int id) throws IllegalArgument, NotFound,
+public TRowResult scannerGet(int id) throws IllegalArgument, NotFound,
IOError {
LOG.debug("scannerGet: id=" + id);
Scanner scanner = getScanner(id);
@@ -507,22 +514,14 @@ public class ThriftServer {
} catch (IOException e) {
throw new IOError(e.getMessage());
}
-ScanEntry retval = new ScanEntry();
-retval.row = results.getRow();
-retval.columns = new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR);
-for (Map.Entry<byte [], Cell> e : results.entrySet()) {
-retval.columns.put(e.getKey(), e.getValue().getValue());
-}
-return retval;
+return ThriftUtilities.rowResultFromHBase(results);
}
public int scannerOpen(byte[] tableName, byte[] startRow,
-ArrayList<byte[]> columns) throws IOError {
+List<byte[]> columns) throws IOError {
if (LOG.isDebugEnabled()) {
-LOG.debug("scannerOpen: table=" + getText(tableName) + ", start="
-+ getText(startRow) + ", columns=" + columns.toString());
+LOG.debug("scannerOpen: table=" + new String(getText(tableName)) + ", start="
++ new String(getText(startRow)) + ", columns=" + columns.toString());
}
try {
HTable table = getTable(tableName);
@@ -539,10 +538,10 @@
}
public int scannerOpenWithStop(byte[] tableName, byte[] startRow,
-byte[] stopRow, ArrayList<byte[]> columns) throws IOError, TException {
+byte[] stopRow, List<byte[]> columns) throws IOError, TException {
if (LOG.isDebugEnabled()) {
-LOG.debug("scannerOpen: table=" + getText(tableName) + ", start="
-+ getText(startRow) + ", stop=" + getText(stopRow) + ", columns="
+LOG.debug("scannerOpen: table=" + new String(getText(tableName)) + ", start="
++ new String(getText(startRow)) + ", stop=" + new String(getText(stopRow)) + ", columns="
+ columns.toString());
}
try {
@@ -560,10 +559,10 @@
}
public int scannerOpenTs(byte[] tableName, byte[] startRow,
-ArrayList<byte[]> columns, long timestamp) throws IOError, TException {
+List<byte[]> columns, long timestamp) throws IOError, TException {
if (LOG.isDebugEnabled()) {
-LOG.debug("scannerOpen: table=" + getText(tableName) + ", start="
-+ getText(startRow) + ", columns=" + columns.toString()
+LOG.debug("scannerOpen: table=" + new String(getText(tableName)) + ", start="
++ new String(getText(startRow)) + ", columns=" + columns.toString()
+ ", timestamp=" + timestamp);
}
try {
@@ -581,11 +580,11 @@
}
public int scannerOpenWithStopTs(byte[] tableName, byte[] startRow,
-byte[] stopRow, ArrayList<byte[]> columns, long timestamp)
+byte[] stopRow, List<byte[]> columns, long timestamp)
throws IOError, TException {
if (LOG.isDebugEnabled()) {
-LOG.debug("scannerOpen: table=" + getText(tableName) + ", start="
-+ getText(startRow) + ", stop=" + getText(stopRow) + ", columns="
+LOG.debug("scannerOpen: table=" + new String(getText(tableName)) + ", start="
++ new String(getText(startRow)) + ", stop=" + new String(getText(stopRow)) + ", columns="
+ columns.toString() + ", timestamp=" + timestamp);
}
try {
@@ -602,7 +601,7 @@
}
}
-public AbstractMap<byte[], ColumnDescriptor> getColumnDescriptors(
+public Map<byte[], ColumnDescriptor> getColumnDescriptors(
byte[] tableName) throws IOError, TException {
if (LOG.isDebugEnabled()) {
LOG.debug("getColumnDescriptors: table=" + new String(tableName));
@@ -612,7 +611,7 @@
new TreeMap<byte[], ColumnDescriptor>(Bytes.BYTES_COMPARATOR);
HTable table = getTable(tableName);
-HTableDescriptor desc = table.getMetadata();
+HTableDescriptor desc = table.getTableDescriptor();
for (HColumnDescriptor e : desc.getFamilies()) {
ColumnDescriptor col = ThriftUtilities.colDescFromHbase(e);

ThriftUtilities.java

@@ -18,10 +18,21 @@
package org.apache.hadoop.hbase.thrift;
+import java.io.IOException;
+import java.util.Map;
+import java.util.TreeMap;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HColumnDescriptor.CompressionType;
+import org.apache.hadoop.hbase.io.Cell;
+import org.apache.hadoop.hbase.io.RowResult;
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
+import org.apache.hadoop.hbase.thrift.generated.IOError;
import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
+import org.apache.hadoop.hbase.thrift.generated.NotFound;
+import org.apache.hadoop.hbase.thrift.generated.TCell;
+import org.apache.hadoop.hbase.thrift.generated.TRowResult;
+import org.apache.hadoop.hbase.util.Bytes;
public class ThriftUtilities {
@@ -71,4 +82,34 @@ public class ThriftUtilities {
return col;
}
/**
* This utility method creates a new Thrift TCell "struct" based on
* an Hbase Cell object.
*
* @param in
* Hbase Cell object
* @return Thrift TCell
*/
static public TCell cellFromHBase(Cell in) {
return new TCell(in.getValue(), in.getTimestamp());
}
/**
* This utility method creates a new Thrift TRowResult "struct" based on
* an Hbase RowResult object.
*
* @param in
* Hbase RowResult object
* @return Thrift TRowResult
*/
static public TRowResult rowResultFromHBase(RowResult in) {
TRowResult result = new TRowResult();
result.row = in.getRow();
result.columns = new TreeMap<byte[], TCell>(Bytes.BYTES_COMPARATOR);
for (Map.Entry<byte[], Cell> entry : in.entrySet()){
result.columns.put(entry.getKey(), ThriftUtilities.cellFromHBase(entry.getValue()));
}
return result;
}
}
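As a small illustrative sketch (again, not part of the patch), the two new helpers are used the same way ThriftServer uses them above: an HBase Cell or RowResult fetched from an HTable is turned into the Thrift structs returned to clients. The class and method names below are hypothetical; only the ThriftUtilities calls and struct fields come from the code in this change.

import java.util.Map;

import org.apache.hadoop.hbase.io.Cell;
import org.apache.hadoop.hbase.io.RowResult;
import org.apache.hadoop.hbase.thrift.ThriftUtilities;
import org.apache.hadoop.hbase.thrift.generated.TCell;
import org.apache.hadoop.hbase.thrift.generated.TRowResult;

public class ThriftUtilitiesSketch {

  // Convert a single HBase Cell; value and timestamp travel together as a TCell.
  static TCell toThrift(Cell cell) {
    return ThriftUtilities.cellFromHBase(cell);
  }

  // Convert a whole row; TRowResult carries the row key and a byte[]-keyed map of TCells.
  static void printRow(RowResult rowResult) {
    TRowResult tRow = ThriftUtilities.rowResultFromHBase(rowResult);
    System.out.println(new String(tRow.row));
    for (Map.Entry<byte[], TCell> e : tRow.columns.entrySet()) {
      TCell c = e.getValue();
      System.out.println("  " + new String(e.getKey()) + " => "
          + new String(c.value) + " @ " + c.timestamp);
    }
  }
}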

AlreadyExists.java

@@ -23,9 +23,11 @@
*/
package org.apache.hadoop.hbase.thrift.generated;
+import java.util.List;
import java.util.ArrayList;
-import java.util.AbstractMap;
+import java.util.Map;
import java.util.HashMap;
+import java.util.Set;
import java.util.HashSet;
import com.facebook.thrift.*;
@@ -40,7 +42,7 @@ public class AlreadyExists extends Exception implements TBase, java.io.Serializa
public String message;
public final Isset __isset = new Isset();
-public static final class Isset {
+public static final class Isset implements java.io.Serializable {
public boolean message = false;
}
@@ -55,6 +57,34 @@ public class AlreadyExists extends Exception implements TBase, java.io.Serializa
this.__isset.message = true;
}
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof AlreadyExists)
return this.equals((AlreadyExists)that);
return false;
}
public boolean equals(AlreadyExists that) {
if (that == null)
return false;
boolean this_present_message = true && (this.message != null);
boolean that_present_message = true && (that.message != null);
if (this_present_message || that_present_message) {
if (!(this_present_message && that_present_message))
return false;
if (!this.message.equals(that.message))
return false;
}
return true;
}
public int hashCode() {
return 0;
}
public void read(TProtocol iprot) throws TException {
TField field;
iprot.readStructBegin();

BatchMutation.java

@@ -23,9 +23,11 @@
*/
package org.apache.hadoop.hbase.thrift.generated;
+import java.util.List;
import java.util.ArrayList;
-import java.util.AbstractMap;
+import java.util.Map;
import java.util.HashMap;
+import java.util.Set;
import java.util.HashSet;
import com.facebook.thrift.*;
@@ -37,10 +39,10 @@ import com.facebook.thrift.transport.*;
*/
public class BatchMutation implements TBase, java.io.Serializable {
public byte[] row;
-public ArrayList<Mutation> mutations;
+public List<Mutation> mutations;
public final Isset __isset = new Isset();
-public static final class Isset {
+public static final class Isset implements java.io.Serializable {
public boolean row = false;
public boolean mutations = false;
}
@@ -50,7 +52,7 @@ public class BatchMutation implements TBase, java.io.Serializable {
public BatchMutation(
byte[] row,
-ArrayList<Mutation> mutations)
+List<Mutation> mutations)
{
this();
this.row = row;
@@ -59,6 +61,43 @@ public class BatchMutation implements TBase, java.io.Serializable {
this.__isset.mutations = true;
}
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof BatchMutation)
return this.equals((BatchMutation)that);
return false;
}
public boolean equals(BatchMutation that) {
if (that == null)
return false;
boolean this_present_row = true && (this.row != null);
boolean that_present_row = true && (that.row != null);
if (this_present_row || that_present_row) {
if (!(this_present_row && that_present_row))
return false;
if (!java.util.Arrays.equals(this.row, that.row))
return false;
}
boolean this_present_mutations = true && (this.mutations != null);
boolean that_present_mutations = true && (that.mutations != null);
if (this_present_mutations || that_present_mutations) {
if (!(this_present_mutations && that_present_mutations))
return false;
if (!this.mutations.equals(that.mutations))
return false;
}
return true;
}
public int hashCode() {
return 0;
}
public void read(TProtocol iprot) throws TException {
TField field;
iprot.readStructBegin();

ColumnDescriptor.java

@@ -23,9 +23,11 @@
*/
package org.apache.hadoop.hbase.thrift.generated;
+import java.util.List;
import java.util.ArrayList;
-import java.util.AbstractMap;
+import java.util.Map;
import java.util.HashMap;
+import java.util.Set;
import java.util.HashSet;
import com.facebook.thrift.*;
@@ -50,7 +52,7 @@ public class ColumnDescriptor implements TBase, java.io.Serializable {
public int timeToLive;
public final Isset __isset = new Isset();
-public static final class Isset {
+public static final class Isset implements java.io.Serializable {
public boolean name = false;
public boolean maxVersions = false;
public boolean compression = false;
@@ -119,6 +121,115 @@ public class ColumnDescriptor implements TBase, java.io.Serializable {
this.__isset.timeToLive = true;
}
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof ColumnDescriptor)
return this.equals((ColumnDescriptor)that);
return false;
}
public boolean equals(ColumnDescriptor that) {
if (that == null)
return false;
boolean this_present_name = true && (this.name != null);
boolean that_present_name = true && (that.name != null);
if (this_present_name || that_present_name) {
if (!(this_present_name && that_present_name))
return false;
if (!java.util.Arrays.equals(this.name, that.name))
return false;
}
boolean this_present_maxVersions = true;
boolean that_present_maxVersions = true;
if (this_present_maxVersions || that_present_maxVersions) {
if (!(this_present_maxVersions && that_present_maxVersions))
return false;
if (this.maxVersions != that.maxVersions)
return false;
}
boolean this_present_compression = true && (this.compression != null);
boolean that_present_compression = true && (that.compression != null);
if (this_present_compression || that_present_compression) {
if (!(this_present_compression && that_present_compression))
return false;
if (!this.compression.equals(that.compression))
return false;
}
boolean this_present_inMemory = true;
boolean that_present_inMemory = true;
if (this_present_inMemory || that_present_inMemory) {
if (!(this_present_inMemory && that_present_inMemory))
return false;
if (this.inMemory != that.inMemory)
return false;
}
boolean this_present_maxValueLength = true;
boolean that_present_maxValueLength = true;
if (this_present_maxValueLength || that_present_maxValueLength) {
if (!(this_present_maxValueLength && that_present_maxValueLength))
return false;
if (this.maxValueLength != that.maxValueLength)
return false;
}
boolean this_present_bloomFilterType = true && (this.bloomFilterType != null);
boolean that_present_bloomFilterType = true && (that.bloomFilterType != null);
if (this_present_bloomFilterType || that_present_bloomFilterType) {
if (!(this_present_bloomFilterType && that_present_bloomFilterType))
return false;
if (!this.bloomFilterType.equals(that.bloomFilterType))
return false;
}
boolean this_present_bloomFilterVectorSize = true;
boolean that_present_bloomFilterVectorSize = true;
if (this_present_bloomFilterVectorSize || that_present_bloomFilterVectorSize) {
if (!(this_present_bloomFilterVectorSize && that_present_bloomFilterVectorSize))
return false;
if (this.bloomFilterVectorSize != that.bloomFilterVectorSize)
return false;
}
boolean this_present_bloomFilterNbHashes = true;
boolean that_present_bloomFilterNbHashes = true;
if (this_present_bloomFilterNbHashes || that_present_bloomFilterNbHashes) {
if (!(this_present_bloomFilterNbHashes && that_present_bloomFilterNbHashes))
return false;
if (this.bloomFilterNbHashes != that.bloomFilterNbHashes)
return false;
}
boolean this_present_blockCacheEnabled = true;
boolean that_present_blockCacheEnabled = true;
if (this_present_blockCacheEnabled || that_present_blockCacheEnabled) {
if (!(this_present_blockCacheEnabled && that_present_blockCacheEnabled))
return false;
if (this.blockCacheEnabled != that.blockCacheEnabled)
return false;
}
boolean this_present_timeToLive = true;
boolean that_present_timeToLive = true;
if (this_present_timeToLive || that_present_timeToLive) {
if (!(this_present_timeToLive && that_present_timeToLive))
return false;
if (this.timeToLive != that.timeToLive)
return false;
}
return true;
}
public int hashCode() {
return 0;
}
public void read(TProtocol iprot) throws TException {
TField field;
iprot.readStructBegin();


@@ -23,9 +23,11 @@
*/
package org.apache.hadoop.hbase.thrift.generated;
+import java.util.List;
import java.util.ArrayList;
-import java.util.AbstractMap;
+import java.util.Map;
import java.util.HashMap;
+import java.util.Set;
import java.util.HashSet;
import com.facebook.thrift.*;

File diff suppressed because it is too large

IOError.java

@@ -23,9 +23,11 @@
*/
package org.apache.hadoop.hbase.thrift.generated;
+import java.util.List;
import java.util.ArrayList;
-import java.util.AbstractMap;
+import java.util.Map;
import java.util.HashMap;
+import java.util.Set;
import java.util.HashSet;
import com.facebook.thrift.*;
@@ -41,7 +43,7 @@ public class IOError extends Exception implements TBase, java.io.Serializable {
public String message;
public final Isset __isset = new Isset();
-public static final class Isset {
+public static final class Isset implements java.io.Serializable {
public boolean message = false;
}
@@ -56,6 +58,34 @@ public class IOError extends Exception implements TBase, java.io.Serializable {
this.__isset.message = true;
}
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof IOError)
return this.equals((IOError)that);
return false;
}
public boolean equals(IOError that) {
if (that == null)
return false;
boolean this_present_message = true && (this.message != null);
boolean that_present_message = true && (that.message != null);
if (this_present_message || that_present_message) {
if (!(this_present_message && that_present_message))
return false;
if (!this.message.equals(that.message))
return false;
}
return true;
}
public int hashCode() {
return 0;
}
public void read(TProtocol iprot) throws TException {
TField field;
iprot.readStructBegin();

IllegalArgument.java

@@ -23,9 +23,11 @@
*/
package org.apache.hadoop.hbase.thrift.generated;
+import java.util.List;
import java.util.ArrayList;
-import java.util.AbstractMap;
+import java.util.Map;
import java.util.HashMap;
+import java.util.Set;
import java.util.HashSet;
import com.facebook.thrift.*;
@@ -40,7 +42,7 @@ public class IllegalArgument extends Exception implements TBase, java.io.Seriali
public String message;
public final Isset __isset = new Isset();
-public static final class Isset {
+public static final class Isset implements java.io.Serializable {
public boolean message = false;
}
@@ -55,6 +57,34 @@ public class IllegalArgument extends Exception implements TBase, java.io.Seriali
this.__isset.message = true;
}
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof IllegalArgument)
return this.equals((IllegalArgument)that);
return false;
}
public boolean equals(IllegalArgument that) {
if (that == null)
return false;
boolean this_present_message = true && (this.message != null);
boolean that_present_message = true && (that.message != null);
if (this_present_message || that_present_message) {
if (!(this_present_message && that_present_message))
return false;
if (!this.message.equals(that.message))
return false;
}
return true;
}
public int hashCode() {
return 0;
}
public void read(TProtocol iprot) throws TException {
TField field;
iprot.readStructBegin();

Mutation.java

@@ -23,9 +23,11 @@
*/
package org.apache.hadoop.hbase.thrift.generated;
+import java.util.List;
import java.util.ArrayList;
-import java.util.AbstractMap;
+import java.util.Map;
import java.util.HashMap;
+import java.util.Set;
import java.util.HashSet;
import com.facebook.thrift.*;
@@ -41,7 +43,7 @@ public class Mutation implements TBase, java.io.Serializable {
public byte[] value;
public final Isset __isset = new Isset();
-public static final class Isset {
+public static final class Isset implements java.io.Serializable {
public boolean isDelete = false;
public boolean column = false;
public boolean value = false;
@@ -66,6 +68,52 @@ public class Mutation implements TBase, java.io.Serializable {
this.__isset.value = true;
}
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof Mutation)
return this.equals((Mutation)that);
return false;
}
public boolean equals(Mutation that) {
if (that == null)
return false;
boolean this_present_isDelete = true;
boolean that_present_isDelete = true;
if (this_present_isDelete || that_present_isDelete) {
if (!(this_present_isDelete && that_present_isDelete))
return false;
if (this.isDelete != that.isDelete)
return false;
}
boolean this_present_column = true && (this.column != null);
boolean that_present_column = true && (that.column != null);
if (this_present_column || that_present_column) {
if (!(this_present_column && that_present_column))
return false;
if (!java.util.Arrays.equals(this.column, that.column))
return false;
}
boolean this_present_value = true && (this.value != null);
boolean that_present_value = true && (that.value != null);
if (this_present_value || that_present_value) {
if (!(this_present_value && that_present_value))
return false;
if (!java.util.Arrays.equals(this.value, that.value))
return false;
}
return true;
}
public int hashCode() {
return 0;
}
public void read(TProtocol iprot) throws TException {
TField field;
iprot.readStructBegin();

NotFound.java

@@ -23,9 +23,11 @@
*/
package org.apache.hadoop.hbase.thrift.generated;
+import java.util.List;
import java.util.ArrayList;
-import java.util.AbstractMap;
+import java.util.Map;
import java.util.HashMap;
+import java.util.Set;
import java.util.HashSet;
import com.facebook.thrift.*;
@@ -40,7 +42,7 @@ public class NotFound extends Exception implements TBase, java.io.Serializable {
public String message;
public final Isset __isset = new Isset();
-public static final class Isset {
+public static final class Isset implements java.io.Serializable {
public boolean message = false;
}
@@ -55,6 +57,34 @@ public class NotFound extends Exception implements TBase, java.io.Serializable {
this.__isset.message = true;
}
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof NotFound)
return this.equals((NotFound)that);
return false;
}
public boolean equals(NotFound that) {
if (that == null)
return false;
boolean this_present_message = true && (this.message != null);
boolean that_present_message = true && (that.message != null);
if (this_present_message || that_present_message) {
if (!(this_present_message && that_present_message))
return false;
if (!this.message.equals(that.message))
return false;
}
return true;
}
public int hashCode() {
return 0;
}
public void read(TProtocol iprot) throws TException {
TField field;
iprot.readStructBegin();

RegionDescriptor.java (deleted)

@@ -1,111 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
*/
package org.apache.hadoop.hbase.thrift.generated;
import java.util.ArrayList;
import java.util.AbstractMap;
import java.util.HashMap;
import java.util.HashSet;
import com.facebook.thrift.*;
import com.facebook.thrift.protocol.*;
import com.facebook.thrift.transport.*;
/**
* A RegionDescriptor contains informationa about an HTable region.
* Currently, this is just the startKey of the region.
*/
public class RegionDescriptor implements TBase, java.io.Serializable {
public byte[] startKey;
public final Isset __isset = new Isset();
public static final class Isset {
public boolean startKey = false;
}
public RegionDescriptor() {
}
public RegionDescriptor(
byte[] startKey)
{
this();
this.startKey = startKey;
this.__isset.startKey = true;
}
public void read(TProtocol iprot) throws TException {
TField field;
iprot.readStructBegin();
while (true)
{
field = iprot.readFieldBegin();
if (field.type == TType.STOP) {
break;
}
switch (field.id)
{
case 1:
if (field.type == TType.STRING) {
this.startKey = iprot.readBinary();
this.__isset.startKey = true;
} else {
TProtocolUtil.skip(iprot, field.type);
}
break;
default:
TProtocolUtil.skip(iprot, field.type);
break;
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
}
public void write(TProtocol oprot) throws TException {
TStruct struct = new TStruct("RegionDescriptor");
oprot.writeStructBegin(struct);
TField field = new TField();
if (this.startKey != null) {
field.name = "startKey";
field.type = TType.STRING;
field.id = 1;
oprot.writeFieldBegin(field);
oprot.writeBinary(this.startKey);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
public String toString() {
StringBuilder sb = new StringBuilder("RegionDescriptor(");
sb.append("startKey:");
sb.append(this.startKey);
sb.append(")");
return sb.toString();
}
}

ScanEntry.java (deleted)

@@ -1,153 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
*/
package org.apache.hadoop.hbase.thrift.generated;
import java.util.ArrayList;
import java.util.AbstractMap;
import java.util.HashMap;
import java.util.HashSet;
import com.facebook.thrift.*;
import com.facebook.thrift.protocol.*;
import com.facebook.thrift.transport.*;
/**
* A ScanEntry contains the row, column, and value information for a scanner's
* current location.
*/
public class ScanEntry implements TBase, java.io.Serializable {
public byte[] row;
public AbstractMap<byte[],byte[]> columns;
public final Isset __isset = new Isset();
public static final class Isset {
public boolean row = false;
public boolean columns = false;
}
public ScanEntry() {
}
public ScanEntry(
byte[] row,
AbstractMap<byte[],byte[]> columns)
{
this();
this.row = row;
this.__isset.row = true;
this.columns = columns;
this.__isset.columns = true;
}
public void read(TProtocol iprot) throws TException {
TField field;
iprot.readStructBegin();
while (true)
{
field = iprot.readFieldBegin();
if (field.type == TType.STOP) {
break;
}
switch (field.id)
{
case 1:
if (field.type == TType.STRING) {
this.row = iprot.readBinary();
this.__isset.row = true;
} else {
TProtocolUtil.skip(iprot, field.type);
}
break;
case 2:
if (field.type == TType.MAP) {
{
TMap _map0 = iprot.readMapBegin();
this.columns = new HashMap<byte[],byte[]>(2*_map0.size);
for (int _i1 = 0; _i1 < _map0.size; ++_i1)
{
byte[] _key2;
byte[] _val3;
_key2 = iprot.readBinary();
_val3 = iprot.readBinary();
this.columns.put(_key2, _val3);
}
iprot.readMapEnd();
}
this.__isset.columns = true;
} else {
TProtocolUtil.skip(iprot, field.type);
}
break;
default:
TProtocolUtil.skip(iprot, field.type);
break;
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
}
public void write(TProtocol oprot) throws TException {
TStruct struct = new TStruct("ScanEntry");
oprot.writeStructBegin(struct);
TField field = new TField();
if (this.row != null) {
field.name = "row";
field.type = TType.STRING;
field.id = 1;
oprot.writeFieldBegin(field);
oprot.writeBinary(this.row);
oprot.writeFieldEnd();
}
if (this.columns != null) {
field.name = "columns";
field.type = TType.MAP;
field.id = 2;
oprot.writeFieldBegin(field);
{
oprot.writeMapBegin(new TMap(TType.STRING, TType.STRING, this.columns.size()));
for (byte[] _iter4 : this.columns.keySet()) {
oprot.writeBinary(_iter4);
oprot.writeBinary(this.columns.get(_iter4));
}
oprot.writeMapEnd();
}
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
public String toString() {
StringBuilder sb = new StringBuilder("ScanEntry(");
sb.append("row:");
sb.append(this.row);
sb.append(",columns:");
sb.append(this.columns);
sb.append(")");
return sb.toString();
}
}

package.html

@@ -49,7 +49,7 @@ org.apache.hadoop.hbase.thrift.generated} directory.
<p>The files were generated by running the commands:
<pre>
-thrift -strict -java Hbase.thrift
+thrift -strict --gen java Hbase.thrift
mv gen-java/org/apache/hadoop/hbase/thrift/generated .
rm -rf gen-java
</pre>
@@ -62,8 +62,8 @@ part of the Thrift package. A version of the Java runtime is checked into SVN
under the hbase/lib directory.
</p>
-<p>The version of Thrift used to generate the Java files is revision 746 from
-the <a href="http://svn.facebook.com/svnroot/thrift/">SVN repository</a>.</p>
+<p>The version of Thrift used to generate the Java files is release 20080411p1 from
+the <a href="http://developers.facebook.com/thrift/">thrift homepage</a>.</p>
<p>The ThriftServer is run like:
<pre>