HBASE-451 Remove HTableDescriptor from HRegionInfo -- part 2, some cleanup

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1138601 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2011-06-22 19:55:40 +00:00
parent f389865787
commit 7b6f6d6e19
5 changed files with 20 additions and 8 deletions

AvroServer.java

@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.TableExistsException;
+import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.avro.generated.AClusterStatus;
 import org.apache.hadoop.hbase.avro.generated.ADelete;
 import org.apache.hadoop.hbase.avro.generated.AFamilyDescriptor;
@@ -190,6 +191,8 @@ public class AvroServer {
   public ATableDescriptor describeTable(ByteBuffer table) throws AIOError {
     try {
       return AvroUtil.htdToATD(admin.getTableDescriptor(Bytes.toBytes(table)));
+    } catch (TableNotFoundException e) {
+      return null;
     } catch (IOException e) {
       AIOError ioe = new AIOError();
       ioe.message = new Utf8(e.getMessage());
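
With the catch above, describeTable returns null for a missing table instead of wrapping the failure in an AIOError. Callers are expected to null-check the result; a minimal sketch, reusing the impl/tableAname names that appear in the test further down:

    ATableDescriptor atd = impl.describeTable(tableAname);
    if (atd == null) {
      // table does not exist: TableNotFoundException was swallowed server-side
    } else {
      long maxFileSize = atd.maxFileSize;  // public field on the Avro-generated record
    }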

HBaseAdmin.java

@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.RegionException;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableExistsException;
+import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.UnknownRegionException;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.catalog.CatalogTracker;
@@ -194,10 +195,11 @@ public class HBaseAdmin implements Abortable, Closeable {
    * Method for getting the tableDescriptor
    * @param tableName as a byte []
    * @return the tableDescriptor
+   * @throws TableNotFoundException
    * @throws IOException if a remote or network exception occurs
    */
   public HTableDescriptor getTableDescriptor(final byte [] tableName)
-  throws IOException {
+  throws TableNotFoundException, IOException {
     return this.connection.getHTableDescriptor(tableName);
   }
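
Because TableNotFoundException is an IOException subclass in HBase, the widened throws clause is mostly documentation, but it lets callers branch on the missing-table case before the generic I/O case. A sketch, assuming a connected HBaseAdmin named admin and an illustrative table name:

    try {
      HTableDescriptor htd = admin.getTableDescriptor(Bytes.toBytes("myTable"));
      System.out.println("max file size: " + htd.getMaxFileSize());
    } catch (TableNotFoundException e) {
      System.out.println("no such table");  // the more specific subclass first
    } catch (IOException e) {
      throw e;                              // genuine remote or network failure
    }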

FSUtils.java

@@ -936,6 +936,9 @@ public class FSUtils {
     HTableDescriptor htd = null;
     try {
       htd = getTableDescriptor(fs, getTablePath(hbaseRootDir, tableName));
+    } catch (NullPointerException e) {
+      LOG.debug("Exception during readTableDecriptor. Current table name = " +
+          tableName , e);
     } catch (IOException ioe) {
       LOG.debug("Exception during readTableDecriptor. Current table name = " +
           tableName , ioe);
@@ -944,10 +947,10 @@
   }
   public static HTableDescriptor getTableDescriptor(FileSystem fs, Path tableDir)
-  throws IOException {
+  throws IOException, NullPointerException {
+    if (tableDir == null) throw new NullPointerException();
-    FSDataInputStream fsDataInputStream =
-        fs.open(new Path(tableDir, HConstants.TABLEINFO_NAME));
+    Path tableinfo = new Path(tableDir, HConstants.TABLEINFO_NAME);
+    FSDataInputStream fsDataInputStream = fs.open(tableinfo);
     HTableDescriptor hTableDescriptor = null;
     try {
       hTableDescriptor = new HTableDescriptor();
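
The explicit guard turns a null tableDir into a fail-fast NullPointerException, which the caller in the first hunk logs at DEBUG and absorbs. A sketch of the resulting contract, assuming fs is an open FileSystem and the table dir could not be resolved:

    HTableDescriptor htd = null;
    try {
      htd = FSUtils.getTableDescriptor(fs, null);  // trips the new null guard
    } catch (NullPointerException e) {
      // same handling as the catch block added to the caller above
    }
    // htd stays null; callers treat that as "no descriptor on disk"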

TestAvroServer.java

@@ -125,7 +125,11 @@ public class TestAvroServer {
     tableA.maxFileSize = 123456L;
     impl.modifyTable(tableAname, tableA);
     // It can take a while for the change to take effect. Wait here a while.
-    while(impl.describeTable(tableAname).maxFileSize != 123456L) Threads.sleep(100);
+    while (impl.describeTable(tableAname) == null) {
+      Threads.sleep(100);
+    }
+    assertTrue(impl.describeTable(tableAname).maxFileSize == 123456L);
     /* DISABLED FOR NOW TILL WE HAVE BETTER DISABLE/ENABLE
     impl.enableTable(tableAname);
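
The test now polls until the table is visible again (describeTable returns null while the descriptor is missing) before asserting on maxFileSize, rather than dereferencing a possibly-null result. An unbounded poll can hang a broken build, though; a bounded variant (the 30-second deadline is illustrative, not part of the commit) would be:

    long deadline = System.currentTimeMillis() + 30 * 1000;
    while (impl.describeTable(tableAname) == null &&
        System.currentTimeMillis() < deadline) {
      Threads.sleep(100);
    }
    assertTrue(impl.describeTable(tableAname).maxFileSize == 123456L);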

TestMergeTable.java

@@ -36,8 +36,6 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.catalog.CatalogTracker;
 import org.apache.hadoop.hbase.catalog.MetaReader;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.junit.Test;
@@ -96,7 +94,9 @@ public class TestMergeTable {
     byte [] row_70001 = Bytes.toBytes("row_70001");
     byte [] row_80001 = Bytes.toBytes("row_80001");
-    // Create regions and populate them at same time.
+    // Create regions and populate them at same time. Create the tabledir
+    // for them first.
+    FSUtils.createTableDescriptor(fs, rootdir, desc);
     HRegion [] regions = {
       createRegion(desc, null, row_70001, 1, 70000, rootdir),
       createRegion(desc, row_70001, row_80001, 70001, 10000, rootdir),
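
With HBASE-451 moving HTableDescriptor out of HRegionInfo, the descriptor lives on the filesystem, so the test must write it to the table dir before any region referencing the table is created. A sketch of the setup order, using the desc/fs/rootdir names from the test (assumed initialized earlier in the method):

    HTableDescriptor desc = new HTableDescriptor(Bytes.toBytes("test"));
    // writes the descriptor file (HConstants.TABLEINFO_NAME) under the table dir
    FSUtils.createTableDescriptor(fs, rootdir, desc);
    // only afterwards create the HRegions that belong to the table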