HBASE-2962 Add missing methods to HTableInterface (and HTable)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@993548 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2010-09-07 22:20:40 +00:00
parent 1805c1b3b1
commit d98956f9cd
4 changed files with 84 additions and 4 deletions


@ -884,6 +884,8 @@ Release 0.21.0 - Unreleased
HBASE-1676 load balancing on a large cluster doesn't work very well
HBASE-2953 Edit of hbase-default.xml removing stale configs.
HBASE-2857 HBaseAdmin.tableExists() should not require a full meta scan
HBASE-2962 Add missing methods to HTableInterface (and HTable)
(Lars Francke via Stack)
NEW FEATURES
HBASE-1961 HBase EC2 scripts


@ -268,6 +268,7 @@ public class HTable implements HTableInterface {
return connection.getRegionLocation(tableName, row, false);
}
@Override
public byte [] getTableName() {
return this.tableName;
}
@ -305,6 +306,7 @@ public class HTable implements HTableInterface {
this.scannerCaching = scannerCaching;
}
@Override
public HTableDescriptor getTableDescriptor() throws IOException {
return new UnmodifyableHTableDescriptor(
this.connection.getHTableDescriptor(this.tableName));
@ -492,6 +494,7 @@ public class HTable implements HTableInterface {
return allRegions;
}
@Override
public Result getRowOrBefore(final byte[] row, final byte[] family)
throws IOException {
return connection.getRegionServerWithRetries(
@ -503,18 +506,21 @@ public class HTable implements HTableInterface {
});
}
@Override
public ResultScanner getScanner(final Scan scan) throws IOException {
ClientScanner s = new ClientScanner(scan);
s.initialize();
return s;
}
@Override
public ResultScanner getScanner(byte [] family) throws IOException {
Scan scan = new Scan();
scan.addFamily(family);
return getScanner(scan);
}
@Override
public ResultScanner getScanner(byte [] family, byte [] qualifier)
throws IOException {
Scan scan = new Scan();
@ -532,6 +538,10 @@ public class HTable implements HTableInterface {
);
}
public Result[] get(List<Get> gets) throws IOException {
return batch((List) gets);
}
/**
* Method that does a batch call on Deletes, Gets and Puts.
*
@ -541,6 +551,7 @@ public class HTable implements HTableInterface {
* the call for that action failed, even after retries
* @throws IOException
*/
@Override
public synchronized void batch(final List<Row> actions, final Result[] results) throws IOException {
connection.processBatch(actions, tableName, pool, results);
}
@ -553,6 +564,7 @@ public class HTable implements HTableInterface {
* the call for that action failed, even after retries
* @throws IOException
*/
@Override
public synchronized Result[] batch(final List<Row> actions) throws IOException {
Result[] results = new Result[actions.size()];
connection.processBatch(actions, tableName, pool, results);
@ -566,6 +578,7 @@ public class HTable implements HTableInterface {
* @throws IOException if a remote or network exception occurs.
* @since 0.20.0
*/
@Override
public void delete(final Delete delete)
throws IOException {
connection.getRegionServerWithRetries(
@ -587,6 +600,7 @@ public class HTable implements HTableInterface {
* that have not been successfully applied.
* @since 0.20.1
*/
@Override
public void delete(final List<Delete> deletes)
throws IOException {
Result[] results = new Result[deletes.size()];
@ -603,10 +617,12 @@ public class HTable implements HTableInterface {
}
}
@Override
public void put(final Put put) throws IOException {
doPut(Arrays.asList(put));
}
@Override
public void put(final List<Put> puts) throws IOException {
doPut(puts);
}
@ -622,12 +638,14 @@ public class HTable implements HTableInterface {
}
}
@Override
public long incrementColumnValue(final byte [] row, final byte [] family,
final byte [] qualifier, final long amount)
throws IOException {
return incrementColumnValue(row, family, qualifier, amount, true);
}
@Override
public long incrementColumnValue(final byte [] row, final byte [] family,
final byte [] qualifier, final long amount, final boolean writeToWAL)
throws IOException {
@ -665,6 +683,7 @@ public class HTable implements HTableInterface {
* @throws IOException
* @return true if the new put was executed, false otherwise
*/
@Override
public boolean checkAndPut(final byte [] row,
final byte [] family, final byte [] qualifier, final byte [] value,
final Put put)
@ -692,6 +711,7 @@ public class HTable implements HTableInterface {
* @throws IOException
* @return true if the new delete was executed, false otherwise
*/
@Override
public boolean checkAndDelete(final byte [] row,
final byte [] family, final byte [] qualifier, final byte [] value,
final Delete delete)
@ -719,6 +739,7 @@ public class HTable implements HTableInterface {
* @return true if the specified Get matches one or more keys, false if not
* @throws IOException
*/
@Override
public boolean exists(final Get get) throws IOException {
return connection.getRegionServerWithRetries(
new ServerCallable<Boolean>(connection, tableName, get.getRow()) {
@ -738,6 +759,7 @@ public class HTable implements HTableInterface {
* {@link #isAutoFlush()} is {@code true}.
* @throws IOException if a remote or network exception occurs.
*/
@Override
public void flushCommits() throws IOException {
try {
connection.processBatchOfPuts(writeBuffer, tableName, pool);
@ -750,10 +772,7 @@ public class HTable implements HTableInterface {
}
}
/**
* Close down this HTable instance.
* Calls {@link #flushCommits()}.
*/
@Override
public void close() throws IOException{
flushCommits();
}
@ -774,6 +793,7 @@ public class HTable implements HTableInterface {
}
}
@Override
public RowLock lockRow(final byte [] row)
throws IOException {
return connection.getRegionServerWithRetries(
@ -787,6 +807,7 @@ public class HTable implements HTableInterface {
);
}
@Override
public void unlockRow(final RowLock rl)
throws IOException {
connection.getRegionServerWithRetries(
@ -800,6 +821,7 @@ public class HTable implements HTableInterface {
);
}
@Override
public boolean isAutoFlush() {
return autoFlush;
}
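
With these methods now declared on HTableInterface, calling code can be written against the interface instead of the concrete HTable class. The following is a minimal usage sketch, not part of the commit; the table name "mytable" and column family "cf" are hypothetical.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class HTableInterfaceUsage {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Code against the interface; HTable is just one implementation of it.
    HTableInterface table = new HTable(conf, "mytable");   // hypothetical table name
    try {
      byte[] family = Bytes.toBytes("cf");                 // hypothetical family
      byte[] qualifier = Bytes.toBytes("q");

      // put(): seed a row.
      Put seed = new Put(Bytes.toBytes("row1"));
      seed.add(family, qualifier, Bytes.toBytes("v1"));
      table.put(seed);

      // checkAndPut(): apply the update only if the current value is still "v1".
      Put update = new Put(Bytes.toBytes("row1"));
      update.add(family, qualifier, Bytes.toBytes("v2"));
      boolean applied = table.checkAndPut(
          Bytes.toBytes("row1"), family, qualifier, Bytes.toBytes("v1"), update);
      System.out.println("checkAndPut applied: " + applied);

      // get(List<Get>): fetch several rows in one call.
      List<Get> gets = new ArrayList<Get>();
      gets.add(new Get(Bytes.toBytes("row1")));
      gets.add(new Get(Bytes.toBytes("row2")));
      Result[] results = table.get(gets);
      for (Result r : results) {
        // A missing row comes back as an empty Result; a null entry means the
        // get for that row failed even after retries.
        if (r != null && !r.isEmpty()) {
          byte[] value = r.getValue(family, qualifier);
          System.out.println(Bytes.toString(value));
        }
      }
    } finally {
      table.close();   // calls flushCommits() before releasing the table
    }
  }
}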


@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import java.io.IOException;
import java.util.List;
@ -69,6 +70,29 @@ public interface HTableInterface {
*/
boolean exists(Get get) throws IOException;
/**
* Method that does a batch call on Deletes, Gets and Puts.
*
* @param actions list of Get, Put, Delete objects
* @param results Empty Result[], same size as actions. Provides access to partial
* results, in case an exception is thrown. A null in the result array means that
* the call for that action failed, even after retries
* @throws IOException
* @since 0.90.0
*/
void batch(final List<Row> actions, final Result[] results) throws IOException;
/**
* Method that does a batch call on Deletes, Gets and Puts.
*
* @param actions list of Get, Put, Delete objects
* @return the results from the actions. A null in the return array means that
* the call for that action failed, even after retries
* @throws IOException
* @since 0.90.0
*/
Result[] batch(final List<Row> actions) throws IOException;
/**
* Extracts certain cells from a given row.
* @param get The object that specifies what data to fetch and from which row.
@ -80,6 +104,22 @@ public interface HTableInterface {
*/
Result get(Get get) throws IOException;
/**
* Extracts certain cells from the given rows, in batch.
*
* @param gets The objects that specify what data to fetch and from which rows.
*
* @return The data coming from the specified rows, if it exists. If the row
* specified doesn't exist, the {@link Result} instance returned won't
* contain any {@link KeyValue}, as indicated by {@link Result#isEmpty()}.
* A null in the return array means that the get operation for that
* Get failed, even after retries.
* @throws IOException if a remote or network exception occurs.
*
* @since 0.90.0
*/
Result[] get(List<Get> gets) throws IOException;
/**
* Return the row that matches <i>row</i> exactly,
* or the one that immediately precedes it.
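
The two batch signatures differ in how failures surface: with the Result[] variant the array is filled in place, so a null slot marks an action that failed even after retries, and partial results stay visible if the call throws. A minimal sketch of that pattern, assuming an already-open HTableInterface and a hypothetical column family:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.util.Bytes;

public class BatchUsage {
  // Runs a mixed batch of a Put and a Delete and reports which actions failed.
  static void runBatch(HTableInterface table, byte[] family) throws IOException {
    List<Row> actions = new ArrayList<Row>();

    Put put = new Put(Bytes.toBytes("row1"));
    put.add(family, Bytes.toBytes("q"), Bytes.toBytes("v1"));
    actions.add(put);
    actions.add(new Delete(Bytes.toBytes("row2")));

    // Pre-sized result array: a null slot marks an action that failed after retries.
    Result[] results = new Result[actions.size()];
    try {
      table.batch(actions, results);
    } catch (IOException e) {
      for (int i = 0; i < results.length; i++) {
        if (results[i] == null) {
          System.err.println("failed action on row "
              + Bytes.toString(actions.get(i).getRow()));
        }
      }
      throw e;
    }
  }
}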


@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.RowLock;
@ -598,4 +599,19 @@ public class RemoteHTable implements HTableInterface {
throw new IOException("incrementColumnValue not supported");
}
@Override
public void batch(List<Row> actions, Result[] results) throws IOException {
throw new IOException("batch not supported");
}
@Override
public Result[] batch(List<Row> actions) throws IOException {
throw new IOException("batch not supported");
}
@Override
public Result[] get(List<Get> gets) throws IOException {
throw new IOException("get(List<Get>) not supported");
}
}
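
RemoteHTable now compiles against the widened interface but rejects the new calls by throwing IOException, so callers that might be handed a RemoteHTable need a fallback path. A rough sketch follows; it makes the simplifying assumption that any IOException from the bulk call means the operation is unsupported, which real code would want to check more carefully before retrying row by row.

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;

public class MultiGetFallback {
  // Prefer the bulk get(List<Get>); if the implementation rejects it (as
  // RemoteHTable does here), fall back to one get() per row.
  static Result[] getAll(HTableInterface table, List<Get> gets) throws IOException {
    try {
      return table.get(gets);
    } catch (IOException e) {
      // Simplifying assumption: treat the IOException as "not supported".
      Result[] results = new Result[gets.size()];
      for (int i = 0; i < gets.size(); i++) {
        results[i] = table.get(gets.get(i));
      }
      return results;
    }
  }
}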