HBASE-6471 Performance regression caused by HBASE-4054
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1373917 13f79535-47bb-0310-9956-ffa450edef68
commit 14d07f96e1 (parent 03d60b55db)
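Why this fixes the regression: HBASE-4054 had made PooledHTable extend HTable so callers could cast the pooled handle back to HTable, but that meant every HTablePool.getTable() checkout ran the full HTable constructor via super(table.getConfiguration(), table.getTableName()), redoing connection setup and region lookups for a table that was already open. This commit switches the proxy to plain delegation on HTableInterface. Below is a minimal, self-contained sketch of that delegation idea; Table, HeavyTable, PooledTable, and the sleep standing in for construction cost are illustrative names, not HBase API.

import java.util.ArrayDeque;
import java.util.Queue;

// Stand-in interface; the real code delegates on HTableInterface.
interface Table {
  byte[] get(byte[] row);
  void close();
}

// Stand-in for HTable: its constructor does expensive setup (connection,
// region lookups), which is what the pre-fix proxy re-ran on every
// checkout by calling super(...).
final class HeavyTable implements Table {
  HeavyTable() {
    try {
      Thread.sleep(50); // simulated connection/META-lookup cost
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  }
  @Override public byte[] get(byte[] row) { return row; }
  @Override public void close() {}
}

// Post-fix shape: a delegating proxy. Construction is a cheap allocation,
// and close() returns the wrapped table to the pool instead of closing it.
final class PooledTable implements Table {
  private final Table delegate;
  private final Queue<Table> pool;
  PooledTable(Table delegate, Queue<Table> pool) {
    this.delegate = delegate;
    this.pool = pool;
  }
  @Override public byte[] get(byte[] row) { return delegate.get(row); }
  @Override public void close() { pool.offer(delegate); }
}

public class DelegationSketch {
  public static void main(String[] args) {
    Queue<Table> pool = new ArrayDeque<Table>();
    pool.add(new HeavyTable()); // pay the construction cost once
    for (int i = 0; i < 3; i++) {
      Table t = new PooledTable(pool.poll(), pool); // cheap checkout
      t.get(new byte[] { 1 });
      t.close(); // back to the pool
    }
  }
}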
HTablePool.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
+import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
 import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.PoolMap;
@@ -177,11 +178,7 @@ public class HTablePool implements Closeable {
     HTableInterface table = findOrCreateTable(tableName);
     // return a proxy table so when user closes the proxy, the actual table
     // will be returned to the pool
-    try {
-      return new PooledHTable(table);
-    } catch (IOException ioe) {
-      throw new RuntimeException(ioe);
-    }
+    return new PooledHTable(table);
   }
 
   /**
@@ -324,12 +321,11 @@ public class HTablePool implements Closeable {
   * wrapped table back to the table pool
   *
   */
-  class PooledHTable extends HTable {
+  class PooledHTable implements HTableInterface {
 
     private HTableInterface table; // actual table implementation
 
-    public PooledHTable(HTableInterface table) throws IOException {
-      super(table.getConfiguration(), table.getTableName());
+    public PooledHTable(HTableInterface table) {
       this.table = table;
     }
 
@@ -376,6 +372,7 @@ public class HTablePool implements Closeable {
     }
 
     @Override
+    @SuppressWarnings("deprecation")
     public Result getRowOrBefore(byte[] row, byte[] family) throws IOException {
       return table.getRowOrBefore(row, family);
     }
@@ -509,5 +506,48 @@ public class HTablePool implements Closeable {
     HTableInterface getWrappedTable() {
       return table;
     }
+
+    @Override
+    public <R> void batchCallback(List<? extends Row> actions,
+        Object[] results, Callback<R> callback) throws IOException,
+        InterruptedException {
+      table.batchCallback(actions, results, callback);
+    }
+
+    @Override
+    public <R> Object[] batchCallback(List<? extends Row> actions,
+        Callback<R> callback) throws IOException, InterruptedException {
+      return table.batchCallback(actions, callback);
+    }
+
+    @Override
+    public void mutateRow(RowMutations rm) throws IOException {
+      table.mutateRow(rm);
+    }
+
+    @Override
+    public Result append(Append append) throws IOException {
+      return table.append(append);
+    }
+
+    @Override
+    public void setAutoFlush(boolean autoFlush) {
+      table.setAutoFlush(autoFlush);
+    }
+
+    @Override
+    public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail) {
+      table.setAutoFlush(autoFlush, clearBufferOnFail);
+    }
+
+    @Override
+    public long getWriteBufferSize() {
+      return table.getWriteBufferSize();
+    }
+
+    @Override
+    public void setWriteBufferSize(long writeBufferSize) throws IOException {
+      table.setWriteBufferSize(writeBufferSize);
+    }
   }
 }
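A consequence of the delegation shown above: client code must now hold the pooled handle as HTableInterface, since the proxy is no longer an HTable (which is also why the ClassCastException test is deleted further down). A hedged usage sketch against the 0.94-era client API in this diff; the table "mytable" and its column family "f" are assumed to exist:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class PoolUsageSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    HTablePool pool = new HTablePool(conf, 10); // at most 10 pooled refs per table
    // Checkout is now a cheap proxy allocation, not an HTable construction.
    HTableInterface table = pool.getTable("mytable");
    try {
      Put put = new Put(Bytes.toBytes("row1"));
      put.add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
      table.put(put);
    } finally {
      table.close(); // returns the wrapped table to the pool
    }
    pool.close();
  }
}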
RegionsResource.java
@@ -21,7 +21,6 @@
 package org.apache.hadoop.hbase.rest;
 
 import java.io.IOException;
-import java.net.InetSocketAddress;
 import java.util.Map;
 
 import javax.ws.rs.GET;
@@ -38,13 +37,12 @@ import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HServerAddress;
+import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.HTablePool;
+import org.apache.hadoop.hbase.client.MetaScanner;
 import org.apache.hadoop.hbase.rest.model.TableInfoModel;
 import org.apache.hadoop.hbase.rest.model.TableRegionModel;
+import org.apache.hadoop.hbase.util.Bytes;
 
 @InterfaceAudience.Private
 public class RegionsResource extends ResourceBase {
@@ -69,17 +67,6 @@ public class RegionsResource extends ResourceBase {
     this.tableResource = tableResource;
   }
 
-  private Map<HRegionInfo,HServerAddress> getTableRegions()
-      throws IOException {
-    HTablePool pool = servlet.getTablePool();
-    HTableInterface table = pool.getTable(tableResource.getName());
-    try {
-      return ((HTable)table).getRegionsInfo();
-    } finally {
-      table.close();
-    }
-  }
-
   @GET
   @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF})
   public Response get(final @Context UriInfo uriInfo) {
@@ -90,15 +77,14 @@ public class RegionsResource extends ResourceBase {
     try {
       String tableName = tableResource.getName();
       TableInfoModel model = new TableInfoModel(tableName);
-      Map<HRegionInfo,HServerAddress> regions = getTableRegions();
-      for (Map.Entry<HRegionInfo,HServerAddress> e: regions.entrySet()) {
+      Map<HRegionInfo,ServerName> regions = MetaScanner.allTableRegions(
+        servlet.getConfiguration(), Bytes.toBytes(tableName), false);
+      for (Map.Entry<HRegionInfo,ServerName> e: regions.entrySet()) {
         HRegionInfo hri = e.getKey();
-        HServerAddress addr = e.getValue();
-        InetSocketAddress sa = addr.getInetSocketAddress();
+        ServerName addr = e.getValue();
         model.add(
           new TableRegionModel(tableName, hri.getRegionId(),
-            hri.getStartKey(), hri.getEndKey(),
-            sa.getHostName() + ":" + Integer.valueOf(sa.getPort())));
+            hri.getStartKey(), hri.getEndKey(), addr.getHostAndPort()));
       }
       ResponseBuilder response = Response.ok(model);
       response.cacheControl(cacheControl);
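The REST resource now lists a table's regions straight from .META. instead of borrowing an HTable from the servlet's pool on every request. A hedged sketch of the same call outside the servlet, assuming a table named "mytable" exists:

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.MetaScanner;
import org.apache.hadoop.hbase.util.Bytes;

public class ListRegionsSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Scans .META. directly; no HTable or HTablePool is constructed.
    Map<HRegionInfo, ServerName> regions =
        MetaScanner.allTableRegions(conf, Bytes.toBytes("mytable"), false);
    for (Map.Entry<HRegionInfo, ServerName> e : regions.entrySet()) {
      // getHostAndPort() renders "host:port", matching the REST model above.
      System.out.println(e.getKey().getRegionNameAsString()
          + " -> " + e.getValue().getHostAndPort());
    }
  }
}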
TestHTablePool.java
@@ -183,22 +183,6 @@ public class TestHTablePool {
       Assert.assertTrue("alien table rejected", true);
     }
   }
-
-  @Test
-  public void testClassCastException() {
-    //this test makes sure that client code that
-    //casts the table it got from pool to HTable won't break
-    HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
-        Integer.MAX_VALUE);
-    String tableName = Bytes.toString(TABLENAME);
-    try {
-      // get table and check if type is HTable
-      HTable table = (HTable) pool.getTable(tableName);
-      Assert.assertTrue("return type is HTable as expected", true);
-    } catch (ClassCastException e) {
-      Assert.fail("return type is not HTable");
-    }
-  }
 }
 
 @Category(MediumTests.class)
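The deleted test pinned down HBASE-4054's contract (a pooled table can be cast to HTable); this commit deliberately breaks that contract, so the test is removed rather than fixed. If one wanted to pin the new behavior instead, a hypothetical assertion reusing this file's TEST_UTIL and TABLENAME fixtures might look like this (sketch only, not part of the commit):

@Test
public void testPooledTableIsNotHTable() {
  HTablePool pool = new HTablePool(TEST_UTIL.getConfiguration(),
      Integer.MAX_VALUE);
  // The proxy delegates to the real table but is not an HTable itself.
  HTableInterface table = pool.getTable(Bytes.toString(TABLENAME));
  Assert.assertFalse("pooled table should no longer be an HTable",
      table instanceof HTable);
}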