test suite working again

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@785453 13f79535-47bb-0310-9956-ffa450edef68
Andrew Kyle Purtell 2009-06-17 00:06:44 +00:00
parent 306e9851f0
commit e0f5c9c195
3 changed files with 21 additions and 15 deletions

View File

@@ -58,9 +58,9 @@ public abstract class HBaseTestCase extends TestCase {
   /** configuration parameter name for test directory */
   public static final String TEST_DIRECTORY_KEY = "test.build.data";
-  protected final static byte [] fam1 = Bytes.toBytes("colfamily1:");
-  protected final static byte [] fam2 = Bytes.toBytes("colfamily2:");
-  protected final static byte [] fam3 = Bytes.toBytes("colfamily3:");
+  protected final static byte [] fam1 = Bytes.toBytes("colfamily1");
+  protected final static byte [] fam2 = Bytes.toBytes("colfamily2");
+  protected final static byte [] fam3 = Bytes.toBytes("colfamily3");
   protected static final byte [][] COLUMNS = {fam1,
     fam2, fam3};
@@ -309,8 +309,22 @@ public abstract class HBaseTestCase extends TestCase {
             put.setTimeStamp(ts);
           }
           try {
-            String col = column != null ? column : columnFamily;
-            byte[][] split = KeyValue.parseColumn(Bytes.toBytes(col));
+            StringBuilder sb = new StringBuilder();
+            if (column != null && column.contains(":")) {
+              sb.append(column);
+            } else {
+              if (columnFamily != null) {
+                sb.append(columnFamily);
+                if (!columnFamily.endsWith(":")) {
+                  sb.append(":");
+                }
+                if (column != null) {
+                  sb.append(column);
+                }
+              }
+            }
+            byte[][] split =
+              KeyValue.parseColumn(Bytes.toBytes(sb.toString()));
             put.add(split[0], split[1], t);
             updater.put(put);
             count++;
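The added block above is the substance of the fix: HBaseTestCase now accepts a column family with or without the trailing ':' delimiter, and a qualifier either folded into the column string or passed separately, and normalizes everything to a single "family:qualifier" string before handing it to KeyValue.parseColumn. That is consistent with the fam1/fam2/fam3 constants in the first hunk dropping their trailing ':'. A minimal standalone sketch of the normalization (plain Java, not the HBase classes; buildColumn and splitColumn are hypothetical names used only for illustration):

// Standalone sketch of the normalization performed by the new StringBuilder
// block in HBaseTestCase; the names below are illustrative, not part of the patch.
public class ColumnNameSketch {

  // Build a "family:qualifier" string from whatever combination the caller supplies.
  static String buildColumn(String columnFamily, String column) {
    StringBuilder sb = new StringBuilder();
    if (column != null && column.contains(":")) {
      // Caller already passed a fully qualified "family:qualifier" name.
      sb.append(column);
    } else if (columnFamily != null) {
      sb.append(columnFamily);
      if (!columnFamily.endsWith(":")) {
        sb.append(":");            // tolerate both "family" and "family:"
      }
      if (column != null) {
        sb.append(column);         // bare qualifier passed separately
      }
    }
    return sb.toString();
  }

  // Split on the first ':', roughly what KeyValue.parseColumn is used for above.
  static String[] splitColumn(String col) {
    int idx = col.indexOf(':');
    return new String[] { col.substring(0, idx), col.substring(idx + 1) };
  }

  public static void main(String[] args) {
    System.out.println(buildColumn("colfamily1:", null));  // colfamily1:
    System.out.println(buildColumn("colfamily1", null));   // colfamily1:
    System.out.println(buildColumn("colfamily1", "a"));    // colfamily1:a
    System.out.println(buildColumn(null, "colfamily1:a")); // colfamily1:a
    String[] parts = splitColumn(buildColumn("colfamily1", "a"));
    System.out.println(parts[0] + " / " + parts[1]);       // colfamily1 / a
  }
}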

View File

@@ -24,7 +24,6 @@ import org.apache.hadoop.hbase.HBaseClusterTestCase;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TimestampTestBase;
-import org.apache.hadoop.hbase.util.Bytes;
 
 /**
  * Tests user specifiable time stamps putting, getting and scanning. Also
@@ -32,7 +31,7 @@ import org.apache.hadoop.hbase.util.Bytes;
  * run against an HRegion and against an HTable: i.e. both local and remote.
  */
 public class TestOldAPITimestamp extends HBaseClusterTestCase {
-  public static byte[] COLUMN_NAME = Bytes.toBytes("contents:");
+  public static String COLUMN_NAME = "contents:";
 
   /**
    * Basic test of timestamps.
@@ -64,9 +63,6 @@ public class TestOldAPITimestamp extends HBaseClusterTestCase {
    */
   private HTable createTable() throws IOException {
     HTableDescriptor desc = new HTableDescriptor(getName());
-    for (byte[] col: COLUMNS) {
-      desc.addFamily(new HColumnDescriptor(col));
-    }
     desc.addFamily(new HColumnDescriptor(COLUMN_NAME));
     HBaseAdmin admin = new HBaseAdmin(conf);
     admin.createTable(desc);
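For readability, this is roughly how createTable reads after the change: the loop over the shared COLUMNS families is gone and the single COLUMN_NAME family (now a plain String, still with the old-style trailing ':') is added directly. The trailing return is assumed, since the hunk cuts off after admin.createTable:

  // Sketch of the reworked method; only the lines shown in the hunk above come
  // from the patch, the return at the end is assumed for completeness.
  private HTable createTable() throws IOException {
    HTableDescriptor desc = new HTableDescriptor(getName());
    desc.addFamily(new HColumnDescriptor(COLUMN_NAME));
    HBaseAdmin admin = new HBaseAdmin(conf);
    admin.createTable(desc);
    return new HTable(conf, getName());  // assumed: open a client handle on the new table
  }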

View File

@@ -24,7 +24,6 @@ import org.apache.hadoop.hbase.HBaseClusterTestCase;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TimestampTestBase;
-import org.apache.hadoop.hbase.util.Bytes;
 
 /**
  * Tests user specifiable time stamps putting, getting and scanning. Also
@@ -32,7 +31,7 @@ import org.apache.hadoop.hbase.util.Bytes;
  * run against an HRegion and against an HTable: i.e. both local and remote.
  */
 public class TestTimestamp extends HBaseClusterTestCase {
-  public static byte[] COLUMN_NAME = Bytes.toBytes("contents:");
+  public static String COLUMN_NAME = "contents";
 
   /** constructor */
   public TestTimestamp() {
@@ -69,9 +68,6 @@ public class TestTimestamp extends HBaseClusterTestCase {
    */
   private HTable createTable() throws IOException {
     HTableDescriptor desc = new HTableDescriptor(getName());
-    for (byte[] col: COLUMNS) {
-      desc.addFamily(new HColumnDescriptor(col));
-    }
     desc.addFamily(new HColumnDescriptor(COLUMN_NAME));
     HBaseAdmin admin = new HBaseAdmin(conf);
     admin.createTable(desc);
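Note the asymmetry between the two test classes: TestOldAPITimestamp keeps the old-style name "contents:" while TestTimestamp uses the bare "contents". Both spellings are meant to normalize to the same family before parsing, which the hypothetical buildColumn sketch above can pin down with a small JUnit 3 style check (matching the TestCase-based style of these tests; not part of the patch):

import junit.framework.TestCase;

// Illustrative only: verifies that the old ("contents:") and new ("contents")
// spellings of a family name normalize identically before being split.
public class ColumnNameSketchTest extends TestCase {
  public void testOldAndNewSpellingsNormalizeTheSame() {
    assertEquals(ColumnNameSketch.buildColumn("contents:", null),
        ColumnNameSketch.buildColumn("contents", null));
    assertEquals("contents:qual", ColumnNameSketch.buildColumn("contents", "qual"));
    assertEquals("contents:qual", ColumnNameSketch.buildColumn(null, "contents:qual"));
  }
}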