diff --git a/bin/hbase-daemon.sh b/bin/hbase-daemon.sh
index 91a15ec87d0..e45054b5ac7 100755
--- a/bin/hbase-daemon.sh
+++ b/bin/hbase-daemon.sh
@@ -36,7 +36,7 @@
 
 usage="Usage: hbase-daemon.sh [--config ]\
  (start|stop|restart|autorestart) \
- [--formatZK] [--formatFS] "
+ "
 
 # if no args specified, show usage
 if [ $# -le 1 ]; then
@@ -57,19 +57,6 @@ shift
 command=$1
 shift
 
-if [ "$startStop" = "start" ];then
-  for i in 1 2
-  do
-    if [ "$1" = "--formatZK" ];then
-      formatzk=$1
-      shift
-    elif [ "$1" = "--formatFS" ];then
-      formatfs=$1
-      shift
-    fi
-  done
-fi
-
 hbase_rotate_log ()
 {
     log=$1;
@@ -111,10 +98,6 @@ check_before_start(){
     fi
 }
 
-clear_hbase_data() {
-  $bin/hbase-cleanup.sh $formatzk $formatfs
-}
-
 wait_until_done ()
 {
     p=$1
@@ -189,7 +172,6 @@ case $startStop in
 
 (start)
     check_before_start
-    clear_hbase_data
     nohup $thiscmd --config "${HBASE_CONF_DIR}" internal_start $command $args < /dev/null > /dev/null 2>&1 &
   ;;
 
diff --git a/bin/start-hbase.sh b/bin/start-hbase.sh
index 672a0e89ed0..8fca03ca776 100755
--- a/bin/start-hbase.sh
+++ b/bin/start-hbase.sh
@@ -24,7 +24,7 @@
 # Start hadoop hbase daemons.
 # Run this on master node.
 
-usage="Usage: start-hbase.sh [autorestart] [--formatZK] [--formatFS]"
+usage="Usage: start-hbase.sh"
 
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin">/dev/null; pwd`
@@ -37,19 +37,12 @@ if [ $errCode -ne 0 ]
 then
   exit $errCode
 fi
-for i in 1 2 3
-do
-  if [ "$1" = "autorestart" ];then
-    commandToRun="autorestart"
-  elif [ "$1" = "--formatZK" ];then
-    formatzk=$1
-  elif [ "$1" = "--formatFS" ];then
-    formatfs=$1
-  fi
-  shift
-done
-if [ "$commandToRun" = "" ];then
+
+if [ "$1" = "autorestart" ]
+then
+  commandToRun="autorestart"
+else
   commandToRun="start"
 fi
 
@@ -59,10 +52,10 @@ distMode=`$bin/hbase --config "$HBASE_CONF_DIR" org.apache.hadoop.hbase.util.HBa
 
 if [ "$distMode" == 'false' ]
 then
-  "$bin"/hbase-daemon.sh $commandToRun master $formatzk $formatfs
+  "$bin"/hbase-daemon.sh $commandToRun master
 else
   "$bin"/hbase-daemons.sh --config "${HBASE_CONF_DIR}" $commandToRun zookeeper
-  "$bin"/hbase-daemon.sh --config "${HBASE_CONF_DIR}" $commandToRun master $formatzk $formatfs
+  "$bin"/hbase-daemon.sh --config "${HBASE_CONF_DIR}" $commandToRun master
   "$bin"/hbase-daemons.sh --config "${HBASE_CONF_DIR}" \
     --hosts "${HBASE_REGIONSERVERS}" $commandToRun regionserver
   "$bin"/hbase-daemons.sh --config "${HBASE_CONF_DIR}" \
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 4e2c2a82942..ecb0826b5ed 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -1125,10 +1125,12 @@ public class HColumnDescriptor implements WritableComparable
   public int compareTo(HColumnDescriptor o) {
     int result = Bytes.compareTo(this.name, o.getName());
     if (result == 0) {
-      // The maps interface should compare values, even if they're in different orders
-      if (!this.values.equals(o.values)) {
-        return 1;
-      }
+      // punt on comparison for ordering, just calculate difference
+      result = this.values.hashCode() - o.values.hashCode();
+      if (result < 0)
+        result = -1;
+      else if (result > 0)
+        result = 1;
     }
     if (result == 0) {
       result = this.configuration.hashCode() - o.configuration.hashCode();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index b27826856f7..8d2afcf36ef 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -225,7 +225,9 @@ public class HTableDescriptor implements WritableComparable {
    * catalog tables, .META. and -ROOT-.
    */
   protected HTableDescriptor(final byte [] name, HColumnDescriptor[] families) {
-    setName(name);
+    this.name = name.clone();
+    this.nameAsString = Bytes.toString(this.name);
+    setMetaFlags(name);
     for(HColumnDescriptor descriptor : families) {
       this.families.put(descriptor.getName(), descriptor);
     }
@@ -237,7 +239,12 @@ public class HTableDescriptor implements WritableComparable {
    */
   protected HTableDescriptor(final byte [] name, HColumnDescriptor[] families,
       Map values) {
-    this(name.clone(), families);
+    this.name = name.clone();
+    this.nameAsString = Bytes.toString(this.name);
+    setMetaFlags(name);
+    for(HColumnDescriptor descriptor : families) {
+      this.families.put(descriptor.getName(), descriptor);
+    }
     for (Map.Entry entry:
         values.entrySet()) {
       setValue(entry.getKey(), entry.getValue());
@@ -277,7 +284,9 @@ public class HTableDescriptor implements WritableComparable {
    */
   public HTableDescriptor(final byte [] name) {
     super();
-    setName(name);
+    setMetaFlags(this.name);
+    this.name = this.isMetaRegion()? name: isLegalTableName(name);
+    this.nameAsString = Bytes.toString(this.name);
   }
 
   /**
@@ -289,7 +298,9 @@ public class HTableDescriptor implements WritableComparable {
    */
   public HTableDescriptor(final HTableDescriptor desc) {
     super();
-    setName(desc.name.clone());
+    this.name = desc.name.clone();
+    this.nameAsString = Bytes.toString(this.name);
+    setMetaFlags(this.name);
     for (HColumnDescriptor c: desc.families.values()) {
       this.families.put(c.getName(), new HColumnDescriptor(c));
     }
@@ -639,13 +650,9 @@ public class HTableDescriptor implements WritableComparable {
    * Set the name of the table.
    *
    * @param name name of table
-   * @throws IllegalArgumentException if passed a table name
-   * that is made of other than 'word' characters, underscore or period: i.e.
-   * [a-zA-Z_0-9.].
-   * @see HADOOP-1581 HBASE: Un-openable tablename bug
    */
   public void setName(byte[] name) {
-    this.name = isMetaTable(name) ? name : isLegalTableName(name);
+    this.name = name;
     this.nameAsString = Bytes.toString(this.name);
     setMetaFlags(this.name);
   }
@@ -980,34 +987,39 @@ public class HTableDescriptor implements WritableComparable {
    */
   @Override
   public int compareTo(final HTableDescriptor other) {
-    // Check name matches
     int result = Bytes.compareTo(this.name, other.name);
-    if (result != 0) return result;
-
-    // Check size matches
-    result = families.size() - other.families.size();
-    if (result != 0) return result;
-
-    // Compare that all column families
-    for (Iterator it = families.values().iterator(),
-        it2 = other.families.values().iterator(); it.hasNext(); ) {
-      result = it.next().compareTo(it2.next());
-      if (result != 0) {
-        return result;
+    if (result == 0) {
+      result = families.size() - other.families.size();
+    }
+    if (result == 0 && families.size() != other.families.size()) {
+      result = Integer.valueOf(families.size()).compareTo(
+          Integer.valueOf(other.families.size()));
+    }
+    if (result == 0) {
+      for (Iterator it = families.values().iterator(),
+          it2 = other.families.values().iterator(); it.hasNext(); ) {
+        result = it.next().compareTo(it2.next());
+        if (result != 0) {
+          break;
+        }
       }
     }
-
-    // Compare values
-    if (!values.equals(other.values)) {
-      return 1;
+    if (result == 0) {
+      // punt on comparison for ordering, just calculate difference
+      result = this.values.hashCode() - other.values.hashCode();
+      if (result < 0)
+        result = -1;
+      else if (result > 0)
+        result = 1;
     }
-
-    // Compare configuration
-    if (!configuration.equals(other.configuration)) {
-      return 1;
+    if (result == 0) {
+      result = this.configuration.hashCode() - other.configuration.hashCode();
+      if (result < 0)
+        result = -1;
+      else if (result > 0)
+        result = 1;
     }
-
-    return 0;
+    return result;
   }
 
   /**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
index fb4506555bb..253460936dd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
@@ -18,14 +18,12 @@
 package org.apache.hadoop.hbase;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.junit.experimental.categories.Category;
 
@@ -96,71 +94,4 @@ public class TestHColumnDescriptor {
     desc.removeConfiguration(key);
     assertEquals(null, desc.getConfigurationValue(key));
   }
-
-  @Test
-  public void testEqualsWithFamilyName() {
-    final String name1 = "someFamilyName";
-    HColumnDescriptor hcd1 = new HColumnDescriptor(name1);
-    HColumnDescriptor hcd2 = new HColumnDescriptor("someOtherFamilyName");
-    HColumnDescriptor hcd3 = new HColumnDescriptor(name1);
-
-    assertFalse(hcd1.equals(hcd2));
-    assertFalse(hcd2.equals(hcd1));
-
-    assertTrue(hcd3.equals(hcd1));
-    assertTrue(hcd1.equals(hcd3));
-  }
-
-  @Test
-  public void testEqualsWithAdditionalProperties() {
-    final String name1 = "someFamilyName";
-    HColumnDescriptor hcd1 = new HColumnDescriptor(name1);
-    HColumnDescriptor hcd2 = new HColumnDescriptor(name1);
-    hcd2.setBlocksize(4);
-
-    assertFalse(hcd1.equals(hcd2));
-    assertFalse(hcd2.equals(hcd1));
-
-    hcd1.setBlocksize(4);
-
-    assertTrue(hcd2.equals(hcd1));
-    assertTrue(hcd1.equals(hcd2));
-  }
-
-  @Test
-  public void testEqualsWithDifferentNumberOfProperties() {
-    final String name1 = "someFamilyName";
-    HColumnDescriptor hcd1 = new HColumnDescriptor(name1);
-    HColumnDescriptor hcd2 = new HColumnDescriptor(name1);
-    hcd2.setBlocksize(4);
-    hcd1.setBlocksize(4);
-
-    assertTrue(hcd2.equals(hcd1));
-    assertTrue(hcd1.equals(hcd2));
-
-    hcd2.setBloomFilterType(BloomType.ROW);
-
-    assertFalse(hcd1.equals(hcd2));
-    assertFalse(hcd2.equals(hcd1));
-  }
-
-  @Test
-  public void testEqualsWithDifferentOrderingOfProperties() {
-    final String name1 = "someFamilyName";
-    HColumnDescriptor hcd1 = new HColumnDescriptor(name1);
-    HColumnDescriptor hcd2 = new HColumnDescriptor(name1);
-    hcd2.setBlocksize(4);
-    hcd2.setBloomFilterType(BloomType.ROW);
-    hcd1.setBloomFilterType(BloomType.ROW);
-    hcd1.setBlocksize(4);
-
-    assertTrue(hcd2.equals(hcd1));
-    assertTrue(hcd1.equals(hcd2));
-  }
-
-  @Test
-  public void testEqualityWithSameObject() {
-    HColumnDescriptor hcd1 = new HColumnDescriptor("someName");
-    assertTrue(hcd1.equals(hcd1));
-  }
 }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
index 5f06b429c91..bc8e72c8689 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
@@ -198,173 +198,4 @@ public class TestHTableDescriptor {
     desc.removeConfiguration(key);
     assertEquals(null, desc.getConfigurationValue(key));
   }
-
-  @Test
-  public void testEqualsWithDifferentProperties() {
-    // Test basic property difference
-    HTableDescriptor h1 = new HTableDescriptor();
-    h1.setName(Bytes.toBytes("n1"));
-
-    HTableDescriptor h2 = new HTableDescriptor();
-    h2.setName(Bytes.toBytes("n2"));
-
-    assertFalse(h2.equals(h1));
-    assertFalse(h1.equals(h2));
-
-    h2.setName(Bytes.toBytes("n1"));
-    assertTrue(h2.equals(h1));
-    assertTrue(h1.equals(h2));
-  }
-
-  @Test
-  public void testEqualsWithDifferentNumberOfItems() {
-    HTableDescriptor h1 = new HTableDescriptor();
-    HTableDescriptor h2 = new HTableDescriptor();
-
-    // Test diff # of items
-    h1 = new HTableDescriptor();
-    h1.setName(Bytes.toBytes("n1"));
-
-    h2 = new HTableDescriptor();
-    h2.setName(Bytes.toBytes("n1"));
-
-    HColumnDescriptor hcd1 = new HColumnDescriptor(Bytes.toBytes("someName"));
-    HColumnDescriptor hcd2 = new HColumnDescriptor(Bytes.toBytes("someOtherName"));
-
-    h1.addFamily(hcd1);
-    h2.addFamily(hcd1);
-    h1.addFamily(hcd2);
-
-    assertFalse(h2.equals(h1));
-    assertFalse(h1.equals(h2));
-
-    h2.addFamily(hcd2);
-
-    assertTrue(h2.equals(h1));
-    assertTrue(h1.equals(h2));
-  }
-
-  @Test
-  public void testNotEqualsWithDifferentHCDs() {
-    HTableDescriptor h1 = new HTableDescriptor();
-    HTableDescriptor h2 = new HTableDescriptor();
-
-    // Test diff # of items
-    h1 = new HTableDescriptor();
-    h1.setName(Bytes.toBytes("n1"));
-
-    h2 = new HTableDescriptor();
-    h2.setName(Bytes.toBytes("n1"));
-
-    HColumnDescriptor hcd1 = new HColumnDescriptor(Bytes.toBytes("someName"));
-    HColumnDescriptor hcd2 = new HColumnDescriptor(Bytes.toBytes("someOtherName"));
-
-    h1.addFamily(hcd1);
-    h2.addFamily(hcd2);
-
-    assertFalse(h2.equals(h1));
-    assertFalse(h1.equals(h2));
-  }
-
-  @Test
-  public void testEqualsWithDifferentHCDObjects() {
-    HTableDescriptor h1 = new HTableDescriptor();
-    HTableDescriptor h2 = new HTableDescriptor();
-
-    // Test diff # of items
-    h1 = new HTableDescriptor();
-    h1.setName(Bytes.toBytes("n1"));
-
-    h2 = new HTableDescriptor();
-    h2.setName(Bytes.toBytes("n1"));
-
-    HColumnDescriptor hcd1 = new HColumnDescriptor(Bytes.toBytes("someName"));
-    HColumnDescriptor hcd2 = new HColumnDescriptor(Bytes.toBytes("someName"));
-
-    h1.addFamily(hcd1);
-    h2.addFamily(hcd2);
-
-    assertTrue(h2.equals(h1));
-    assertTrue(h1.equals(h2));
-  }
-
-  @Test
-  public void testNotEqualsWithDifferentItems() {
-    HTableDescriptor h1 = new HTableDescriptor();
-    HTableDescriptor h2 = new HTableDescriptor();
-
-    // Test diff # of items
-    h1 = new HTableDescriptor();
-    h1.setName(Bytes.toBytes("n1"));
-
-    h2 = new HTableDescriptor();
-    h2.setName(Bytes.toBytes("n1"));
-
-    HColumnDescriptor hcd1 = new HColumnDescriptor(Bytes.toBytes("someName"));
-    HColumnDescriptor hcd2 = new HColumnDescriptor(Bytes.toBytes("someOtherName"));
-    h1.addFamily(hcd1);
-    h2.addFamily(hcd2);
-
-    assertFalse(h2.equals(h1));
-    assertFalse(h1.equals(h2));
-  }
-
-  @Test
-  public void testEqualsWithDifferentOrderingsOfItems() {
-    HTableDescriptor h1 = new HTableDescriptor();
-    HTableDescriptor h2 = new HTableDescriptor();
-
-    //Test diff # of items
-    h1 = new HTableDescriptor();
-    h1.setName(Bytes.toBytes("n1"));
-
-    h2 = new HTableDescriptor();
-    h2.setName(Bytes.toBytes("n1"));
-
-    HColumnDescriptor hcd1 = new HColumnDescriptor(Bytes.toBytes("someName"));
-    HColumnDescriptor hcd2 = new HColumnDescriptor(Bytes.toBytes("someOtherName"));
-    h1.addFamily(hcd1);
-    h2.addFamily(hcd2);
-    h1.addFamily(hcd2);
-    h2.addFamily(hcd1);
-
-    assertTrue(h2.equals(h1));
-    assertTrue(h1.equals(h2));
-  }
-
-  @Test
-  public void testSingleItemEquals() {
-    HTableDescriptor h1 = new HTableDescriptor();
-    HTableDescriptor h2 = new HTableDescriptor();
-
-    //Test diff # of items
-    h1 = new HTableDescriptor();
-    h1.setName(Bytes.toBytes("n1"));
-
-    h2 = new HTableDescriptor();
-    h2.setName(Bytes.toBytes("n1"));
-
-    HColumnDescriptor hcd1 = new HColumnDescriptor(Bytes.toBytes("someName"));
-    HColumnDescriptor hcd2 = new HColumnDescriptor(Bytes.toBytes("someName"));
-    h1.addFamily(hcd1);
-    h2.addFamily(hcd2);
-
-    assertTrue(h2.equals(h1));
-    assertTrue(h1.equals(h2));
-  }
-
-  @Test
-  public void testEmptyEquals() {
-    HTableDescriptor h1 = new HTableDescriptor();
-    HTableDescriptor h2 = new HTableDescriptor();
-
-    assertTrue(h2.equals(h1));
-    assertTrue(h1.equals(h2));
-  }
-
-  @Test
-  public void testEqualityWithSameObject() {
-    HTableDescriptor htd = new HTableDescriptor("someName");
-    assertTrue(htd.equals(htd));
-  }
 }
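For reference (not part of the patch itself): a minimal, self-contained sketch of the sign-clamped hashCode tie-break that the restored compareTo implementations in HColumnDescriptor and HTableDescriptor use. The class name DescriptorCompareSketch, its fields, and the simplified Map<String, String> metadata map below are illustrative only and are not HBase APIs.

import java.util.Map;
import java.util.TreeMap;

// Illustrative stand-in for a descriptor: a name plus a metadata map, with ties
// broken the same way the patch's restored compareTo methods break them.
public class DescriptorCompareSketch implements Comparable<DescriptorCompareSketch> {

  private final String name;                                   // primary sort key
  private final Map<String, String> values = new TreeMap<>();  // metadata map

  public DescriptorCompareSketch(String name) {
    this.name = name;
  }

  public DescriptorCompareSketch setValue(String key, String value) {
    values.put(key, value);
    return this;
  }

  @Override
  public int compareTo(DescriptorCompareSketch other) {
    int result = this.name.compareTo(other.name);
    if (result == 0) {
      // No meaningful ordering exists for the metadata maps, so compare their
      // hash codes and clamp the difference to -1/0/1, as the patch does.
      result = this.values.hashCode() - other.values.hashCode();
      if (result < 0) {
        result = -1;
      } else if (result > 0) {
        result = 1;
      }
    }
    return result;
  }

  public static void main(String[] args) {
    DescriptorCompareSketch a = new DescriptorCompareSketch("cf").setValue("BLOCKSIZE", "4");
    DescriptorCompareSketch b = new DescriptorCompareSketch("cf").setValue("BLOOMFILTER", "ROW");
    // Equal names but different metadata: barring a hash collision, the two calls
    // return opposite nonzero signs; the ordering distinguishes, it does not rank.
    System.out.println(a.compareTo(b) + " / " + b.compareTo(a));
  }
}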