diff --git a/CHANGES.txt b/CHANGES.txt
index d00ff96d79e..11531c4f956 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -38,6 +38,7 @@ Hbase Change Log
HBASE-663 Incorrect sequence number for cache flush
HBASE-655 Need programmatic way to add column family: need programmatic way
to enable/disable table
+ HBASE-654 API HTable.getMetadata().addFamily shouldn't be exposed to user
IMPROVEMENTS
HBASE-559 MR example job to count table rows
@@ -71,7 +72,11 @@ Hbase Change Log
(Clint Morgan via Stack)
HBASE-579 Add hadoop 0.17.x
HBASE-660 [Migration] addColumn/deleteColumn functionality in MetaUtils
+ HBASE-632 HTable.getMetadata is very inefficient
+ NEW FEATURES
+ HBASE-639 Add HBaseAdmin.getTableDescriptor function
+
Release 0.1.2 - 05/13/2008
BUG FIXES
diff --git a/src/java/org/apache/hadoop/hbase/HConstants.java b/src/java/org/apache/hadoop/hbase/HConstants.java
index db796149730..2ec6f0f8026 100644
--- a/src/java/org/apache/hadoop/hbase/HConstants.java
+++ b/src/java/org/apache/hadoop/hbase/HConstants.java
@@ -31,6 +31,7 @@ public interface HConstants {
static final Long ZERO_L = Long.valueOf(0L);
static final String NINES = "99999999999999";
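+ // All-zero region id string; used to build the lowest-sorting region name for a table when scanning the catalog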
+ static final String ZEROES = "00000000000000";
// For migration
diff --git a/src/java/org/apache/hadoop/hbase/HTableDescriptor.java b/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
index 1e75637976b..234289b649d 100644
--- a/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -182,6 +182,7 @@ public class HTableDescriptor implements WritableComparable {
* descriptors.
* @see #getNameAsString()
*/
+ @Override
public String toString() {
return "name: " + Bytes.toString(this.name) + ", families: " +
this.families.values();
diff --git a/src/java/org/apache/hadoop/hbase/client/HConnection.java b/src/java/org/apache/hadoop/hbase/client/HConnection.java
index 26479ea4fac..b845abae529 100644
--- a/src/java/org/apache/hadoop/hbase/client/HConnection.java
+++ b/src/java/org/apache/hadoop/hbase/client/HConnection.java
@@ -60,6 +60,14 @@ public interface HConnection {
*/
public HTableDescriptor[] listTables() throws IOException;
+ /**
+ * @param tableName name of the table whose descriptor is wanted
+ * @return the table's metadata as an HTableDescriptor, or null if the table does not exist
+ * @throws IOException if a remote or network exception occurs
+ */
+ public HTableDescriptor getHTableDescriptor(byte[] tableName)
+ throws IOException;
+
/**
* Find the location of the region of tableName that row
* lives in.
diff --git a/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java b/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
index 5fa0c03212b..26cb8939ce2 100644
--- a/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
+++ b/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
@@ -202,9 +202,8 @@ public class HConnectionManager implements HConstants {
if (this.master == null) {
if (masterLocation == null) {
throw new MasterNotRunningException();
- } else {
- throw new MasterNotRunningException(masterLocation.toString());
}
+ throw new MasterNotRunningException(masterLocation.toString());
}
return this.master;
}
@@ -267,8 +266,11 @@ public class HConnectionManager implements HConstants {
MetaScannerVisitor visitor = new MetaScannerVisitor() {
- public boolean processRow(RowResult rowResult,
- HRegionLocation metaLocation, HRegionInfo info) throws IOException {
+ /** {@inheritDoc} */
+ public boolean processRow(
+ @SuppressWarnings("unused") RowResult rowResult,
+ @SuppressWarnings("unused") HRegionLocation metaLocation,
+ HRegionInfo info) {
// Only examine the rows where the startKey is zero length
if (info.getStartKey().length == 0) {
@@ -283,6 +285,40 @@ public class HConnectionManager implements HConstants {
return uniqueTables.toArray(new HTableDescriptor[uniqueTables.size()]);
}
+ /** {@inheritDoc} */
+ public HTableDescriptor getHTableDescriptor(byte[] tableName)
+ throws IOException {
+ if (Bytes.equals(tableName, HConstants.ROOT_TABLE_NAME)) {
+ return new UnmodifyableHTableDescriptor(HTableDescriptor.ROOT_TABLEDESC);
+ }
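+ // .META.'s own regions are catalogued in -ROOT-; all other tables are catalogued in .META.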
+ HTable meta = new HTable(conf,
+ Bytes.equals(tableName, HConstants.META_TABLE_NAME) ?
+ HConstants.ROOT_TABLE_NAME : HConstants.META_TABLE_NAME);
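+ // Start the scan at the lowest possible region name for this table (ZEROES region id)
+ // and return the first HRegionInfo whose table name matches.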
+ Scanner s = meta.getScanner(HConstants.COL_REGIONINFO_ARRAY,
+ HRegionInfo.createRegionName(tableName, null, HConstants.ZEROES));
+ try {
+ RowResult r = null;
+ while ((r = s.next()) != null) {
+ Cell c = r.get(HConstants.COL_REGIONINFO);
+ if (c != null) {
+ HRegionInfo info = Writables.getHRegionInfoOrNull(c.getValue());
+ if (info != null) {
+ if (Bytes.equals(info.getTableDesc().getName(), tableName)) {
+ return new UnmodifyableHTableDescriptor(info.getTableDesc());
+ }
+ }
+ }
+ }
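+ // No catalog row matched this table name.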
+ return null;
+ } finally {
+ s.close();
+ }
+ }
+
/** {@inheritDoc} */
public HRegionLocation locateRegion(final byte [] tableName,
final byte [] row)
diff --git a/src/java/org/apache/hadoop/hbase/client/HTable.java b/src/java/org/apache/hadoop/hbase/client/HTable.java
index e7c2b7326e7..f20647cfa29 100644
--- a/src/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/src/java/org/apache/hadoop/hbase/client/HTable.java
@@ -270,7 +270,9 @@ public class HTable {
// The root region is always online
return false;
}
- HTable meta = new HTable(conf, HConstants.META_TABLE_NAME);
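+ // Regions of .META. are catalogued in -ROOT-, so consult -ROOT- when asked about .META. itself.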
+ HTable meta = new HTable(conf,
+ Bytes.equals(tableName, HConstants.META_TABLE_NAME) ?
+ HConstants.ROOT_TABLE_NAME : HConstants.META_TABLE_NAME);
Scanner s = meta.getScanner(HConstants.COL_REGIONINFO_ARRAY,
HRegionInfo.createRegionName(tableName, null, HConstants.NINES));
try {
@@ -336,20 +338,12 @@ public class HTable {
}
/**
- * TODO: Make the return read-only.
* @return table metadata
* @throws IOException
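+ * @deprecated Use {@link HConnection#getHTableDescriptor(byte[])} instead.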
*/
+ @Deprecated
public HTableDescriptor getMetadata() throws IOException {
- HTableDescriptor [] metas = this.connection.listTables();
- HTableDescriptor result = null;
- for (int i = 0; i < metas.length; i++) {
- if (Bytes.equals(metas[i].getName(), this.tableName)) {
- result = metas[i];
- break;
- }
- }
- return result;
+ return this.connection.getHTableDescriptor(this.tableName);
}
/**
@@ -388,16 +382,15 @@ public class HTable {
* @return A map of HRegionInfo with its server address
* @throws IOException
*/
- @SuppressWarnings("null")
public Map getRegionsInfo() throws IOException {
final HashMap regionMap =
new HashMap();
MetaScannerVisitor visitor = new MetaScannerVisitor() {
- @SuppressWarnings("unused")
public boolean processRow(@SuppressWarnings("unused") RowResult rowResult,
- HRegionLocation metaLocation, HRegionInfo info)
- throws IOException {
+ HRegionLocation metaLocation, HRegionInfo hri) {
+
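+ // Wrap in a read-only view so callers cannot modify the cached region metadata.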
+ HRegionInfo info = new UnmodifyableHRegionInfo(hri);
if (!(Bytes.equals(info.getTableDesc().getName(), getTableName()))) {
return false;
}
diff --git a/src/java/org/apache/hadoop/hbase/client/MetaScanner.java b/src/java/org/apache/hadoop/hbase/client/MetaScanner.java
index b24ee37df4b..731213f1b6a 100644
--- a/src/java/org/apache/hadoop/hbase/client/MetaScanner.java
+++ b/src/java/org/apache/hadoop/hbase/client/MetaScanner.java
@@ -89,6 +89,7 @@ class MetaScanner implements HConstants {
*
* @param rowResult
* @param metaLocation
+ * @param info
* @return A boolean to know if it should continue to loop in the region
* @throws IOException
*/
diff --git a/src/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java b/src/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java
new file mode 100644
index 00000000000..c443c8ac8fe
--- /dev/null
+++ b/src/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java
@@ -0,0 +1,87 @@
+/**
+ * Copyright 2008 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.client;
+
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+
+class UnmodifyableHRegionInfo extends HRegionInfo {
+ /* Default constructor - creates empty object */
+ UnmodifyableHRegionInfo() {
+ super(new UnmodifyableHTableDescriptor(), null, null);
+ }
+
+ /*
+ * Construct HRegionInfo with explicit parameters
+ *
+ * @param tableDesc the table descriptor
+ * @param startKey first key in region
+ * @param endKey end of key range
+ * @throws IllegalArgumentException
+ */
+ UnmodifyableHRegionInfo(final HTableDescriptor tableDesc,
+ final byte [] startKey, final byte [] endKey)
+ throws IllegalArgumentException {
+ super(new UnmodifyableHTableDescriptor(tableDesc), startKey, endKey, false);
+ }
+
+ /*
+ * Construct HRegionInfo with explicit parameters
+ *
+ * @param tableDesc the table descriptor
+ * @param startKey first key in region
+ * @param endKey end of key range
+ * @param split true if this region has split and we have daughter regions
+ * that may or may not hold references to this region.
+ * @throws IllegalArgumentException
+ */
+ UnmodifyableHRegionInfo(HTableDescriptor tableDesc,
+ final byte [] startKey, final byte [] endKey, final boolean split)
+ throws IllegalArgumentException {
+ super(new UnmodifyableHTableDescriptor(tableDesc), startKey, endKey, split);
+ }
+
+ /*
+ * Creates an unmodifiable copy of an HRegionInfo
+ *
+ * @param info
+ */
+ UnmodifyableHRegionInfo(HRegionInfo info) {
+ super(new UnmodifyableHTableDescriptor(info.getTableDesc()),
+ info.getStartKey(), info.getEndKey(), info.isSplit());
+ }
+
+ /**
+ * Does NOT set the split status. This object is read-only.
+ * @param split ignored
+ */
+ @Override
+ public void setSplit(boolean split) {
+ throw new UnsupportedOperationException("HRegionInfo is read-only");
+ }
+
+ /**
+ * Does NOT set the online/offline status. This object is read-only.
+ * @param offLine ignored
+ */
+ @Override
+ public void setOffline(boolean offLine) {
+ throw new UnsupportedOperationException("HRegionInfo is read-only");
+ }
+}
diff --git a/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java b/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
new file mode 100644
index 00000000000..c090b417a0c
--- /dev/null
+++ b/src/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
@@ -0,0 +1,89 @@
+/**
+ * Copyright 2008 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.client;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.util.Bytes;
+
+class UnmodifyableHTableDescriptor extends HTableDescriptor {
+ /*
+ * Constructs an empty object.
+ * For deserializing an HTableDescriptor instance only.
+ */
+ UnmodifyableHTableDescriptor() {
+ super();
+ }
+
+ /*
+ * Constructor.
+ * @param name Table name.
+ * @throws IllegalArgumentException if passed a table name containing
+ * anything other than 'word' characters, underscore or period: i.e.
+ * [a-zA-Z_0-9.].
+ * @see HADOOP-1581 HBASE: Un-openable tablename bug
+ */
+ UnmodifyableHTableDescriptor(final String name) {
+ this(Bytes.toBytes(name));
+ }
+
+ /*
+ * Constructor.
+ * @param name Table name.
+ * @throws IllegalArgumentException if passed a table name containing
+ * anything other than 'word' characters, underscore or period: i.e.
+ * [a-zA-Z_0-9.].
+ * @see HADOOP-1581 HBASE: Un-openable tablename bug
+ */
+ UnmodifyableHTableDescriptor(final byte [] name) {
+ super(name);
+ }
+
+ /*
+ * Creates an unmodifiable copy of an HTableDescriptor
+ * @param desc
+ */
+ UnmodifyableHTableDescriptor(final HTableDescriptor desc) {
+ super(desc.getName());
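+ // Copy families via super.addFamily; this class's addFamily override always throws.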
+ for (HColumnDescriptor c: desc.getFamilies()) {
+ super.addFamily(c);
+ }
+ }
+
+ /**
+ * Does NOT add a column family. This object is immutable.
+ * @param family HColumnDescriptor of the family to add.
+ */
+ @Override
+ public void addFamily(final HColumnDescriptor family) {
+ throw new UnsupportedOperationException("HTableDescriptor is read-only");
+ }
+
+ /**
+ * Does NOT remove a column family. This object is immutable.
+ * @param column name of the family or column to remove
+ */
+ @Override
+ public HColumnDescriptor removeFamily(final byte [] column) {
+ throw new UnsupportedOperationException("HTableDescriptor is read-only");
+ }
+}
diff --git a/src/test/org/apache/hadoop/hbase/client/TestHTable.java b/src/test/org/apache/hadoop/hbase/client/TestHTable.java
index 6060fa46e88..1bee1061343 100644
--- a/src/test/org/apache/hadoop/hbase/client/TestHTable.java
+++ b/src/test/org/apache/hadoop/hbase/client/TestHTable.java
@@ -22,8 +22,6 @@ package org.apache.hadoop.hbase.client;
import java.io.IOException;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseClusterTestCase;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -39,7 +37,6 @@ import org.apache.hadoop.io.Text;
* Tests HTable
*/
public class TestHTable extends HBaseClusterTestCase implements HConstants {
- private static final Log LOG = LogFactory.getLog(TestHTable.class);
private static final HColumnDescriptor column =
new HColumnDescriptor(COLUMN_FAMILY);
@@ -84,7 +81,8 @@ public class TestHTable extends HBaseClusterTestCase implements HConstants {
HTable a = new HTable(conf, tableAname);
// Assert the metadata is good.
- HTableDescriptor meta = a.getMetadata();
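+ // HTable.getMetadata() is deprecated; fetch the descriptor through the connection instead.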
+ HTableDescriptor meta =
+ a.getConnection().getHTableDescriptor(tableAdesc.getName());
assertTrue(meta.equals(tableAdesc));
BatchUpdate batchUpdate = new BatchUpdate(row);