HBASE-4032 HBASE-451 improperly breaks public API HRegionInfo#getTableDesc
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1152532 13f79535-47bb-0310-9956-ffa450edef68
commit 50e3d546eb
parent 27daf3e744
CHANGES.txt
@@ -181,6 +181,7 @@ Release 0.91.0 - Unreleased
    HBASE-4138 If zookeeper.znode.parent is not specifed explicitly in Client
               code then HTable object loops continuously waiting for the root region
               by using /hbase as the base node.(ramkrishna.s.vasudevan)
+   HBASE-4032 HBASE-451 improperly breaks public API HRegionInfo#getTableDesc
 
  IMPROVEMENTS
    HBASE-3290 Max Compaction Size (Nicolas Spiegelberg via Stack)
HRegionInfo.java
@@ -26,9 +26,13 @@ import java.util.Arrays;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.migration.HRegionInfo090x;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.JenkinsHash;
 import org.apache.hadoop.hbase.util.MD5Hash;
 import org.apache.hadoop.io.VersionedWritable;
@@ -535,18 +539,50 @@ public class HRegionInfo extends VersionedWritable implements WritableComparable
     Bytes.equals(endKey, HConstants.EMPTY_BYTE_ARRAY));
   }
 
-  /** @return the tableDesc */
+  /**
+   * @return the tableDesc
+   * @deprecated Do not use; expensive call
+   *             use HRegionInfo.getTableNameAsString() in place of
+   *             HRegionInfo.getTableDesc().getNameAsString()
+   */
   @Deprecated
   public HTableDescriptor getTableDesc() {
-    return null;
+    Configuration c = HBaseConfiguration.create();
+    FileSystem fs;
+    try {
+      fs = FileSystem.get(c);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+    FSTableDescriptors fstd =
+      new FSTableDescriptors(fs, new Path(c.get(HConstants.HBASE_DIR)));
+    try {
+      return fstd.get(this.tableName);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
   }
 
   /**
    * @param newDesc new table descriptor to use
+   * @deprecated Do not use; expensive call
    */
   @Deprecated
   public void setTableDesc(HTableDescriptor newDesc) {
-    // do nothing.
+    Configuration c = HBaseConfiguration.create();
+    FileSystem fs;
+    try {
+      fs = FileSystem.get(c);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+    FSTableDescriptors fstd =
+      new FSTableDescriptors(fs, new Path(c.get(HConstants.HBASE_DIR)));
+    try {
+      fstd.add(newDesc);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
   }
 
   /** @return true if this is the root region */
TestHRegionInfo.java
@@ -19,13 +19,20 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.MD5Hash;
 
 import org.junit.Test;
-import static org.junit.Assert.*;
 
 public class TestHRegionInfo {
   @Test
@@ -52,6 +59,25 @@ public class TestHRegionInfo {
       nameStr);
   }
 
+  @Test
+  public void testGetSetOfHTD() {
+    HBaseTestingUtility HTU = new HBaseTestingUtility();
+    final String tablename = "testGetSetOfHTD";
+    HTableDescriptor htd = new HTableDescriptor(tablename);
+    FSUtils.createTableDescriptor(htd, HTU.getConfiguration());
+    HRegionInfo hri = new HRegionInfo(Bytes.toBytes("testGetSetOfHTD"),
+      HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
+    HTableDescriptor htd2 = hri.getTableDesc();
+    assertTrue(htd.equals(htd2));
+    final String key = "SOME_KEY";
+    assertNull(htd.getValue(key));
+    final String value = "VALUE";
+    htd.setValue(key, value);
+    hri.setTableDesc(htd);
+    HTableDescriptor htd3 = hri.getTableDesc();
+    assertTrue(htd.equals(htd3));
+  }
+
   @Test
   public void testContainsRange() {
     HTableDescriptor tableDesc = new HTableDescriptor("testtable");
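For reference, the round trip this test depends on can be sketched in isolation. This is an illustrative fragment, not part of the commit, and the table name and error handling are made up; it reuses only calls the patch itself touches: FSUtils.createTableDescriptor to persist the descriptor under hbase.rootdir, and FSTableDescriptors to read it back, which is what the reworked getTableDesc() does internally.

    // Illustrative only: persist a descriptor, then read it back the way
    // the deprecated getTableDesc() now does.
    Configuration conf = HBaseConfiguration.create();
    HTableDescriptor htd = new HTableDescriptor("exampleTable");  // hypothetical table
    FSUtils.createTableDescriptor(htd, conf);                     // writes the descriptor under hbase.rootdir

    FileSystem fs = FileSystem.get(conf);
    FSTableDescriptors fstd =
      new FSTableDescriptors(fs, new Path(conf.get(HConstants.HBASE_DIR)));
    HTableDescriptor roundTripped = fstd.get(Bytes.toBytes("exampleTable"));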