HADOOP-1821 Replace all String.getBytes() with String.getBytes("UTF-8")

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@571711 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Jim Kellerman 2007-09-01 06:22:01 +00:00
parent a1689adf0e
commit 844e56e704
27 changed files with 235 additions and 101 deletions

View File

@ -28,6 +28,7 @@ Trunk (unreleased changes)
(Ning Li via Stack)
HADOOP-1800 output should default utf8 encoding
HADOOP-1814 TestCleanRegionServerExit fails too often on Hudson
HADOOP-1821 Replace all String.getBytes() with String.getBytes("UTF-8")
IMPROVEMENTS
HADOOP-1737 Make HColumnDescriptor data members publicly settable

View File

@ -19,7 +19,6 @@
*/
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.Text;
/**
@ -86,11 +85,24 @@ public interface HConstants {
// Always store the location of the root table's HRegion.
// This HRegion is never split.
// region name = table + startkey + regionid. This is the row key.
// each row in the root and meta tables describes exactly 1 region
// Do we ever need to know all the information that we are storing?
// Note that the name of the root table starts with "-" and the name of the
// meta table starts with "." Why? It's a trick. It turns out that when we
// store region names in memory, we use a SortedMap. Since "-" sorts before
// "." (and since no other table name can start with either of these
// characters), the root region will always be the first entry in such a Map,
// followed by all the meta regions (which will be ordered by their starting
// row key as well), followed by all user tables. So when the Master is
// choosing regions to assign, it will always choose the root region first,
// followed by the meta regions, followed by user regions. Since the root
// and meta regions always need to be on-line, this ensures that they will
// be the first to be reassigned if the server(s) they are being served by
// should go down.
/** The root table's name. */
static final Text ROOT_TABLE_NAME = new Text("-ROOT-");
@ -133,11 +145,4 @@ public interface HConstants {
/** When we encode strings, we always specify UTF8 encoding */
static final String UTF8_ENCODING = "UTF-8";
/** Value stored for a deleted item */
static final ImmutableBytesWritable DELETE_BYTES =
new ImmutableBytesWritable("HBASE::DELETEVAL".getBytes());
/** Value written to HLog on a complete cache flush */
static final ImmutableBytesWritable COMPLETE_CACHEFLUSH =
new ImmutableBytesWritable("HBASE::CACHEFLUSH".getBytes());
}

View File

@ -19,14 +19,28 @@
*/
package org.apache.hadoop.hbase;
import java.io.UnsupportedEncodingException;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
/**
* Global values used for finding and scanning the root and meta tables.
* Global values that require initialization that cannot be done in HConstants
*/
public class HGlobals implements HConstants {
/** table descriptor for root table */
public static HTableDescriptor rootTableDesc = null;
static HTableDescriptor rootTableDesc = null;
static HRegionInfo rootRegionInfo = null;
static HTableDescriptor metaTableDesc = null;
/** region info for the root region */
public static HRegionInfo rootRegionInfo = null;
/** table descriptor for meta table */
public static HTableDescriptor metaTableDesc = null;
/** Value stored for a deleted item */
public static ImmutableBytesWritable deleteBytes = null;
/** Value written to HLog on a complete cache flush */
public static ImmutableBytesWritable completeCacheFlush = null;
static {
rootTableDesc = new HTableDescriptor(ROOT_TABLE_NAME.toString());
@ -38,5 +52,17 @@ public class HGlobals implements HConstants {
metaTableDesc = new HTableDescriptor(META_TABLE_NAME.toString());
metaTableDesc.addFamily(new HColumnDescriptor(COLUMN_FAMILY, 1,
HColumnDescriptor.CompressionType.NONE, false, Integer.MAX_VALUE, null));
try {
deleteBytes =
new ImmutableBytesWritable("HBASE::DELETEVAL".getBytes(UTF8_ENCODING));
completeCacheFlush =
new ImmutableBytesWritable("HBASE::CACHEFLUSH".getBytes(UTF8_ENCODING));
} catch (UnsupportedEncodingException e) {
assert(false);
}
}
}

View File

@ -433,7 +433,7 @@ public class HLog implements HConstants {
}
writer.append(new HLogKey(regionName, tableName, HLog.METAROW, logSeqId),
new HLogEdit(HLog.METACOLUMN, COMPLETE_CACHEFLUSH.get(),
new HLogEdit(HLog.METACOLUMN, HGlobals.completeCacheFlush.get(),
System.currentTimeMillis()));
numEntries.getAndIncrement();

View File

@ -258,7 +258,7 @@ public class HMemcache {
for (Map.Entry<HStoreKey, byte []> es: tailMap.entrySet()) {
HStoreKey itKey = es.getKey();
if (itKey.matchesRowCol(curKey)) {
if(HConstants.DELETE_BYTES.compareTo(es.getValue()) == 0) {
if(HGlobals.deleteBytes.compareTo(es.getValue()) == 0) {
// TODO: Shouldn't this be a continue rather than a break? Perhaps
// the intent is that this DELETE_BYTES is meant to suppress older
// info -- see 5.4 Compactions in BigTable -- but how does this jibe

View File

@ -1103,7 +1103,7 @@ public class HRegion implements HConstants {
* @throws IOException
*/
public void put(long lockid, Text targetCol, byte [] val) throws IOException {
if (DELETE_BYTES.compareTo(val) == 0) {
if (HGlobals.deleteBytes.compareTo(val) == 0) {
throw new IOException("Cannot insert value: " + val);
}
localput(lockid, targetCol, val);
@ -1117,7 +1117,7 @@ public class HRegion implements HConstants {
* @throws IOException
*/
public void delete(long lockid, Text targetCol) throws IOException {
localput(lockid, targetCol, DELETE_BYTES.get());
localput(lockid, targetCol, HGlobals.deleteBytes.get());
}
/**

View File

@ -1064,7 +1064,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
for(Map.Entry<Text, byte []> e: results.entrySet()) {
HStoreKey k = new HStoreKey(key.getRow(), e.getKey(), key.getTimestamp());
byte [] val = e.getValue();
if (DELETE_BYTES.compareTo(val) == 0) {
if (HGlobals.deleteBytes.compareTo(val) == 0) {
// Column value is deleted. Don't return it.
continue;
}

View File

@ -1033,7 +1033,7 @@ class HStore implements HConstants {
Text readcol = readkey.getColumn();
if (results.get(readcol) == null
&& key.matchesWithoutColumn(readkey)) {
if(readval.equals(HConstants.DELETE_BYTES)) {
if(readval.equals(HGlobals.deleteBytes)) {
break;
}
results.put(new Text(readcol), readval.get());
@ -1086,14 +1086,14 @@ class HStore implements HConstants {
continue;
}
if (readkey.matchesRowCol(key)) {
if(readval.equals(HConstants.DELETE_BYTES)) {
if(readval.equals(HGlobals.deleteBytes)) {
break;
}
results.add(readval.get());
readval = new ImmutableBytesWritable();
while(map.next(readkey, readval) && readkey.matchesRowCol(key)) {
if ((numVersions > 0 && (results.size() >= numVersions))
|| readval.equals(HConstants.DELETE_BYTES)) {
|| readval.equals(HGlobals.deleteBytes)) {
break;
}
results.add(readval.get());

View File

@ -40,8 +40,12 @@ public class Shell {
/** audible keyboard bells */
public static final boolean DEFAULT_BELL_ENABLED = true;
/** Main method */
public static void main(String args[]) throws IOException {
/** Main method
*
* @param args not used
* @throws IOException
*/
public static void main(@SuppressWarnings("unused") String args[]) throws IOException {
Configuration conf = new HBaseConfiguration();
ConsoleReader reader = new ConsoleReader();
reader.setBellEnabled(conf.getBoolean("hbaseshell.jline.bell.enabled",
@ -91,8 +95,14 @@ public class Shell {
return (queryStr.toString().equals("")) ? "HBase > " : " --> ";
}
/** return a string of code execution time. */
/**
* @param watch true if execution time should be computed and returned
* @param start start of time interval
* @param end end of time interval
* @return a string of code execution time. */
public static String executeTime(boolean watch, long start, long end) {
return (watch) ? "(" + String.format("%.2f", (end - start) * 0.001) + " sec)" : "";
return (watch) ?
"(" + String.format("%.2f", (end - start) * 0.001) + " sec)" :
"";
}
}

View File

@ -33,7 +33,7 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HGlobals;
import org.apache.hadoop.io.Text;
/**
@ -176,7 +176,7 @@ public class RegExpRowFilter implements RowFilterInterface {
}
}
if (nullColumns.contains(colKey)) {
if (data != null && !Arrays.equals(HConstants.DELETE_BYTES.get(), data)) {
if (data != null && !Arrays.equals(HGlobals.deleteBytes.get(), data)) {
if (LOG.isDebugEnabled()) {
LOG.debug("filter returning true for rowKey: " + rowKey +
" colKey: " + colKey);
@ -198,7 +198,7 @@ public class RegExpRowFilter implements RowFilterInterface {
public boolean filterNotNull(final TreeMap<Text, byte[]> columns) {
for (Entry<Text, byte[]> col : columns.entrySet()) {
if (nullColumns.contains(col.getKey())
&& !Arrays.equals(HConstants.DELETE_BYTES.get(), col.getValue())) {
&& !Arrays.equals(HGlobals.deleteBytes.get(), col.getValue())) {
if (LOG.isDebugEnabled()) {
LOG.debug("filterNotNull returning true for colKey: " + col.getKey()
+ ", column should be null.");

View File

@ -20,9 +20,11 @@
package org.apache.hadoop.hbase.mapred;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Map;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@ -147,7 +149,11 @@ public class GroupingTableMap extends TableMap {
if(i > 0) {
sb.append(" ");
}
sb.append(new String(vals[i]));
try {
sb.append(new String(vals[i], HConstants.UTF8_ENCODING));
} catch (UnsupportedEncodingException e) {
throw new RuntimeException(e);
}
}
return new Text(sb.toString());
}

View File

@ -159,7 +159,7 @@ public abstract class HBaseTestCase extends TestCase {
for (char d = secondCharStart; d <= LAST_CHAR; d++) {
for (char e = thirdCharStart; e <= LAST_CHAR; e++) {
byte [] bytes = new byte [] {(byte)c, (byte)d, (byte)e};
Text t = new Text(new String(bytes));
Text t = new Text(new String(bytes, HConstants.UTF8_ENCODING));
if (endKey != null && endKey.getLength() > 0
&& endKey.compareTo(t) <= 0) {
break EXIT;

View File

@ -21,6 +21,7 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
@ -312,7 +313,13 @@ public class PerformanceEvaluation implements HConstants {
while(val.length() < ROW_LENGTH) {
val.append(Long.toString(this.rand.nextLong()));
}
return val.toString().getBytes();
byte[] value = null;
try {
value = val.toString().getBytes(HConstants.UTF8_ENCODING);
} catch (UnsupportedEncodingException e) {
assert(false);
}
return value;
}
private String generateStatus(final int sr, final int i, final int lr) {

View File

@ -99,7 +99,7 @@ public class TestBatchUpdate extends HBaseClusterTestCase {
while(scanner.next(key, results)) {
for(Map.Entry<Text, byte[]> e: results.entrySet()) {
System.out.println(key + ": row: " + e.getKey() + " value: " +
new String(e.getValue()));
new String(e.getValue(), HConstants.UTF8_ENCODING));
}
}
} catch (Exception e) {

View File

@ -69,7 +69,7 @@ public class TestGet extends HBaseTestCase {
}
/**
* Constructor
* the test
* @throws IOException
*/
public void testGet() throws IOException {
@ -144,14 +144,14 @@ public class TestGet extends HBaseTestCase {
lockid = r.startUpdate(ROW_KEY);
r.put(lockid, new Text(HConstants.COLUMN_FAMILY + "region"),
"region2".getBytes());
"region2".getBytes(HConstants.UTF8_ENCODING));
String otherServerName = "bar.foo.com:4321";
r.put(lockid, HConstants.COL_SERVER,
Writables.stringToBytes(new HServerAddress(otherServerName).toString()));
r.put(lockid, new Text(HConstants.COLUMN_FAMILY + "junk"),
"junk".getBytes());
"junk".getBytes(HConstants.UTF8_ENCODING));
r.commit(lockid, System.currentTimeMillis());

View File

@ -90,8 +90,10 @@ public class TestHBaseCluster extends HBaseClusterTestCase {
for (int k = FIRST_ROW; k <= NUM_VALS; k++) {
long writeid = table.startUpdate(new Text("row_" + k));
table.put(writeid, CONTENTS_BASIC, (CONTENTSTR + k).getBytes());
table.put(writeid, new Text(ANCHORNUM + k), (ANCHORSTR + k).getBytes());
table.put(writeid, CONTENTS_BASIC,
(CONTENTSTR + k).getBytes(HConstants.UTF8_ENCODING));
table.put(writeid, new Text(ANCHORNUM + k),
(ANCHORSTR + k).getBytes(HConstants.UTF8_ENCODING));
table.commit(writeid);
}
System.out.println("Write " + NUM_VALS + " rows. Elapsed time: "
@ -107,14 +109,14 @@ public class TestHBaseCluster extends HBaseClusterTestCase {
byte bodydata[] = table.get(rowlabel, CONTENTS_BASIC);
assertNotNull(bodydata);
String bodystr = new String(bodydata).toString().trim();
String bodystr = new String(bodydata, HConstants.UTF8_ENCODING).trim();
String teststr = CONTENTSTR + k;
assertEquals("Incorrect value for key: (" + rowlabel + "," + CONTENTS_BASIC
+ "), expected: '" + teststr + "' got: '" + bodystr + "'",
bodystr, teststr);
collabel = new Text(ANCHORNUM + k);
bodydata = table.get(rowlabel, collabel);
bodystr = new String(bodydata).toString().trim();
bodystr = new String(bodydata, HConstants.UTF8_ENCODING).trim();
teststr = ANCHORSTR + k;
assertEquals("Incorrect value for key: (" + rowlabel + "," + collabel
+ "), expected: '" + teststr + "' got: '" + bodystr + "'",
@ -145,7 +147,7 @@ public class TestHBaseCluster extends HBaseClusterTestCase {
for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
Text col = it.next();
byte val[] = curVals.get(col);
String curval = new String(val).trim();
String curval = new String(val, HConstants.UTF8_ENCODING).trim();
if(col.compareTo(CONTENTS_BASIC) == 0) {
assertTrue("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()

View File

@ -84,7 +84,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
assertEquals(tableName, key.getTablename());
assertEquals(HLog.METAROW, key.getRow());
assertEquals(HLog.METACOLUMN, val.getColumn());
assertEquals(0, COMPLETE_CACHEFLUSH.compareTo(val.getVal()));
assertEquals(0, HGlobals.completeCacheFlush.compareTo(val.getVal()));
System.out.println(key + " " + val);
}
} finally {

View File

@ -20,6 +20,7 @@
package org.apache.hadoop.hbase;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
@ -85,7 +86,11 @@ public class TestHMemcache extends TestCase {
TreeMap<Text, byte []> columns = new TreeMap<Text, byte []>();
for (int ii = 0; ii < COLUMNS_COUNT; ii++) {
Text k = getColumnName(i, ii);
columns.put(k, k.toString().getBytes());
try {
columns.put(k, k.toString().getBytes(HConstants.UTF8_ENCODING));
} catch (UnsupportedEncodingException e) {
fail();
}
}
hmc.add(getRowName(i), columns, System.currentTimeMillis());
}
@ -147,7 +152,7 @@ public class TestHMemcache extends TestCase {
}
private void isExpectedRow(final int rowIndex,
TreeMap<Text, byte []> row) {
TreeMap<Text, byte []> row) throws UnsupportedEncodingException {
int i = 0;
for (Text colname: row.keySet()) {
String expectedColname =
@ -159,13 +164,15 @@ public class TestHMemcache extends TestCase {
// for BytesWritable. For comparison, convert bytes to
// String and trim to remove trailing null bytes.
byte [] value = row.get(colname);
String colvalueStr = new String(value).trim();
String colvalueStr = new String(value, HConstants.UTF8_ENCODING).trim();
assertEquals("Content", colnameStr, colvalueStr);
}
}
/** Test getFull from memcache */
public void testGetFull() {
/** Test getFull from memcache
* @throws UnsupportedEncodingException
*/
public void testGetFull() throws UnsupportedEncodingException {
addRows(this.hmemcache);
for (int i = 0; i < ROW_COUNT; i++) {
HStoreKey hsk = new HStoreKey(getRowName(i));

View File

@ -121,8 +121,10 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
for (int k = FIRST_ROW; k <= NUM_VALS; k++) {
long writeid = region.startUpdate(new Text("row_" + k));
region.put(writeid, CONTENTS_BASIC, (CONTENTSTR + k).getBytes());
region.put(writeid, new Text(ANCHORNUM + k), (ANCHORSTR + k).getBytes());
region.put(writeid, CONTENTS_BASIC,
(CONTENTSTR + k).getBytes(HConstants.UTF8_ENCODING));
region.put(writeid, new Text(ANCHORNUM + k),
(ANCHORSTR + k).getBytes(HConstants.UTF8_ENCODING));
region.commit(writeid, System.currentTimeMillis());
}
System.out.println("Write " + NUM_VALS + " rows. Elapsed time: "
@ -147,14 +149,14 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
byte [] bodydata = region.get(rowlabel, CONTENTS_BASIC);
assertNotNull(bodydata);
String bodystr = new String(bodydata).toString().trim();
String bodystr = new String(bodydata, HConstants.UTF8_ENCODING).trim();
String teststr = CONTENTSTR + k;
assertEquals("Incorrect value for key: (" + rowlabel + "," + CONTENTS_BASIC
+ "), expected: '" + teststr + "' got: '" + bodystr + "'",
bodystr, teststr);
collabel = new Text(ANCHORNUM + k);
bodydata = region.get(rowlabel, collabel);
bodystr = new String(bodydata).toString().trim();
bodystr = new String(bodydata, HConstants.UTF8_ENCODING).trim();
teststr = ANCHORSTR + k;
assertEquals("Incorrect value for key: (" + rowlabel + "," + collabel
+ "), expected: '" + teststr + "' got: '" + bodystr + "'",
@ -170,7 +172,8 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
// Try put with bad lockid.
boolean exceptionThrown = false;
try {
region.put(-1, CONTENTS_BASIC, "bad input".getBytes());
region.put(-1, CONTENTS_BASIC,
"bad input".getBytes(HConstants.UTF8_ENCODING));
} catch (LockException e) {
exceptionThrown = true;
}
@ -183,7 +186,7 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
lockid = region.startUpdate(new Text("Some old key"));
String unregisteredColName = "FamilyGroup:FamilyLabel";
region.put(lockid, new Text(unregisteredColName),
unregisteredColName.getBytes());
unregisteredColName.getBytes(HConstants.UTF8_ENCODING));
} catch (IOException e) {
exceptionThrown = true;
} finally {
@ -276,8 +279,8 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
String kLabel = String.format("%1$03d", k);
long lockid = region.startUpdate(new Text("row_vals1_" + kLabel));
region.put(lockid, cols[0], vals1[k].getBytes());
region.put(lockid, cols[1], vals1[k].getBytes());
region.put(lockid, cols[0], vals1[k].getBytes(HConstants.UTF8_ENCODING));
region.put(lockid, cols[1], vals1[k].getBytes(HConstants.UTF8_ENCODING));
region.commit(lockid, System.currentTimeMillis());
numInserted += 2;
}
@ -300,10 +303,12 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
Text col = it.next();
byte [] val = curVals.get(col);
int curval = Integer.parseInt(new String(val).trim());
int curval =
Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
for(int j = 0; j < cols.length; j++) {
if(col.compareTo(cols[j]) == 0) {
assertEquals("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
assertEquals("Error at:" + curKey.getRow() + "/"
+ curKey.getTimestamp()
+ ", Value for " + col + " should be: " + k
+ ", but was fetched as: " + curval, k, curval);
numFetched++;
@ -345,10 +350,12 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
Text col = it.next();
byte [] val = curVals.get(col);
int curval = Integer.parseInt(new String(val).trim());
int curval =
Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
for(int j = 0; j < cols.length; j++) {
if(col.compareTo(cols[j]) == 0) {
assertEquals("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
assertEquals("Error at:" + curKey.getRow() + "/"
+ curKey.getTimestamp()
+ ", Value for " + col + " should be: " + k
+ ", but was fetched as: " + curval, k, curval);
numFetched++;
@ -375,8 +382,8 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
String kLabel = String.format("%1$03d", k);
long lockid = region.startUpdate(new Text("row_vals1_" + kLabel));
region.put(lockid, cols[0], vals1[k].getBytes());
region.put(lockid, cols[1], vals1[k].getBytes());
region.put(lockid, cols[0], vals1[k].getBytes(HConstants.UTF8_ENCODING));
region.put(lockid, cols[1], vals1[k].getBytes(HConstants.UTF8_ENCODING));
region.commit(lockid, System.currentTimeMillis());
numInserted += 2;
}
@ -398,10 +405,12 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
Text col = it.next();
byte [] val = curVals.get(col);
int curval = Integer.parseInt(new String(val).trim());
int curval =
Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
for(int j = 0; j < cols.length; j++) {
if(col.compareTo(cols[j]) == 0) {
assertEquals("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
assertEquals("Error at:" + curKey.getRow() + "/"
+ curKey.getTimestamp()
+ ", Value for " + col + " should be: " + k
+ ", but was fetched as: " + curval, k, curval);
numFetched++;
@ -443,7 +452,8 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
Text col = it.next();
byte [] val = curVals.get(col);
int curval = Integer.parseInt(new String(val).trim());
int curval =
Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
for (int j = 0; j < cols.length; j++) {
if (col.compareTo(cols[j]) == 0) {
assertEquals("Value for " + col + " should be: " + k
@ -480,7 +490,8 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
Text col = it.next();
byte [] val = curVals.get(col);
int curval = Integer.parseInt(new String(val).trim());
int curval =
Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
for (int j = 0; j < cols.length; j++) {
if (col.compareTo(cols[j]) == 0) {
assertEquals("Value for " + col + " should be: " + k
@ -529,7 +540,8 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
// Write to the HRegion
long writeid = region.startUpdate(new Text("row_" + k));
region.put(writeid, CONTENTS_BODY, buf1.toString().getBytes());
region.put(writeid, CONTENTS_BODY,
buf1.toString().getBytes(HConstants.UTF8_ENCODING));
region.commit(writeid, System.currentTimeMillis());
if (k > 0 && k % (N_ROWS / 100) == 0) {
System.out.println("Flushing write #" + k);
@ -656,7 +668,7 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
Text col = it.next();
byte [] val = curVals.get(col);
String curval = new String(val).trim();
String curval = new String(val, HConstants.UTF8_ENCODING).trim();
if(col.compareTo(CONTENTS_BASIC) == 0) {
assertTrue("Error at:" + curKey.getRow() + "/" + curKey.getTimestamp()
@ -709,7 +721,8 @@ public class TestHRegion extends HBaseTestCase implements RegionUnavailableListe
for(Iterator<Text> it = curVals.keySet().iterator(); it.hasNext(); ) {
Text col = it.next();
byte [] val = curVals.get(col);
int curval = Integer.parseInt(new String(val).trim());
int curval =
Integer.parseInt(new String(val, HConstants.UTF8_ENCODING).trim());
for (int j = 0; j < cols.length; j++) {
if (col.compareTo(cols[j]) == 0) {

View File

@ -79,7 +79,7 @@ public class TestHStoreFile extends TestCase {
try {
for (char d = FIRST_CHAR; d <= LAST_CHAR; d++) {
byte[] b = new byte[] {(byte)d};
Text t = new Text(new String(b));
Text t = new Text(new String(b, HConstants.UTF8_ENCODING));
writer.append(new HStoreKey(t, t, System.currentTimeMillis()),
new ImmutableBytesWritable(t.getBytes()));
}
@ -101,7 +101,7 @@ public class TestHStoreFile extends TestCase {
for (char d = FIRST_CHAR; d <= LAST_CHAR; d++) {
for (char e = FIRST_CHAR; e <= LAST_CHAR; e++) {
byte[] b = new byte[] { (byte) d, (byte) e };
Text t = new Text(new String(b));
Text t = new Text(new String(b, HConstants.UTF8_ENCODING));
writer.append(new HStoreKey(t, t, System.currentTimeMillis()),
new ImmutableBytesWritable(t.getBytes()));
}
@ -248,7 +248,7 @@ public class TestHStoreFile extends TestCase {
LOG.info("Last in top: " + key.toString());
top.getClosest(midkey, value);
// Assert value is same as key.
assertEquals(new String(value.get()),
assertEquals(new String(value.get(), HConstants.UTF8_ENCODING),
((HStoreKey) midkey).getRow().toString());
// Next test using a midkey that does not exist in the file.

View File

@ -20,6 +20,7 @@
package org.apache.hadoop.hbase;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
@ -58,9 +59,18 @@ public class TestScanner2 extends HBaseClusterTestCase {
final char LAST_ROWKEY = 'z';
final char FIRST_COLKEY = '0';
final char LAST_COLKEY = '3';
final byte[] GOOD_BYTES = "goodstuff".getBytes();
final byte[] BAD_BYTES = "badstuff".getBytes();
static byte[] GOOD_BYTES = null;
static byte[] BAD_BYTES = null;
static {
try {
GOOD_BYTES = "goodstuff".getBytes(HConstants.UTF8_ENCODING);
BAD_BYTES = "badstuff".getBytes(HConstants.UTF8_ENCODING);
} catch (UnsupportedEncodingException e) {
fail();
}
}
/**
* Test the scanner's handling of various filters.
*
@ -260,7 +270,8 @@ public class TestScanner2 extends HBaseClusterTestCase {
HTable t = new HTable(conf, table);
try {
long lockid = t.startUpdate(region.getRegionName());
t.put(lockid, HConstants.COL_REGIONINFO, Writables.getBytes(region.getRegionInfo()));
t.put(lockid, HConstants.COL_REGIONINFO,
Writables.getBytes(region.getRegionInfo()));
t.put(lockid, HConstants.COL_SERVER,
Writables.stringToBytes(serverAddress.toString()));
t.put(lockid, HConstants.COL_STARTCODE, Writables.longToBytes(startCode));

View File

@ -134,7 +134,8 @@ public class TestSplit extends HBaseTestCase {
// of each.
int interval = (LAST_CHAR - FIRST_CHAR) / 3;
for (HRegion r: sortedMap.values()) {
assertGet(r, COLFAMILY_NAME3, new Text(new String(b)));
assertGet(r, COLFAMILY_NAME3,
new Text(new String(b, HConstants.UTF8_ENCODING)));
b[0] += interval;
}
}

View File

@ -20,6 +20,7 @@
package org.apache.hadoop.hbase;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.Map;
import java.util.TreeMap;
@ -60,12 +61,20 @@ public class TestTableMapReduce extends HBaseTestCase {
private Path dir;
private MiniHBaseCluster hCluster = null;
private byte[][] values = {
"0123".getBytes(),
"abcd".getBytes(),
"wxyz".getBytes(),
"6789".getBytes()
};
private static byte[][] values = null;
static {
try {
values = new byte[][] {
"0123".getBytes(HConstants.UTF8_ENCODING),
"abcd".getBytes(HConstants.UTF8_ENCODING),
"wxyz".getBytes(HConstants.UTF8_ENCODING),
"6789".getBytes(HConstants.UTF8_ENCODING)
};
} catch (UnsupportedEncodingException e) {
fail();
}
}
/**
* {@inheritDoc}
@ -144,7 +153,8 @@ public class TestTableMapReduce extends HBaseTestCase {
// Get the original value and reverse it
String originalValue =
new String(((ImmutableBytesWritable)value.get(keys[0])).get());
new String(((ImmutableBytesWritable)value.get(keys[0])).get(),
HConstants.UTF8_ENCODING);
StringBuilder newValue = new StringBuilder();
for(int i = originalValue.length() - 1; i >= 0; i--) {
newValue.append(originalValue.charAt(i));
@ -153,8 +163,8 @@ public class TestTableMapReduce extends HBaseTestCase {
// Now set the value to be collected
MapWritable outval = new MapWritable();
outval.put(TEXT_OUTPUT_COLUMN,
new ImmutableBytesWritable(newValue.toString().getBytes()));
outval.put(TEXT_OUTPUT_COLUMN, new ImmutableBytesWritable(
newValue.toString().getBytes(HConstants.UTF8_ENCODING)));
output.collect(tKey, outval);
}
@ -297,7 +307,7 @@ public class TestTableMapReduce extends HBaseTestCase {
for(Map.Entry<Text, byte[]> e: results.entrySet()) {
LOG.info(" column: " + e.getKey() + " value: "
+ new String(e.getValue()));
+ new String(e.getValue(), HConstants.UTF8_ENCODING));
}
}

View File

@ -23,12 +23,14 @@ import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.UnsupportedEncodingException;
import java.util.Map;
import java.util.TreeMap;
import junit.framework.TestCase;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HGlobals;
import org.apache.hadoop.io.Text;
/**
@ -39,9 +41,16 @@ public class TestRegExpRowFilter extends TestCase {
RowFilterInterface mainFilter;
final char FIRST_CHAR = 'a';
final char LAST_CHAR = 'e';
byte [] GOOD_BYTES = "abc".getBytes();
final String HOST_PREFIX = "org.apache.site-";
static byte [] GOOD_BYTES = null;
static {
try {
GOOD_BYTES = "abc".getBytes(HConstants.UTF8_ENCODING);
} catch (UnsupportedEncodingException e) {
fail();
}
}
/** {@inheritDoc} */
@Override
protected void setUp() throws Exception {
@ -112,7 +121,9 @@ public class TestRegExpRowFilter extends TestCase {
yahooSite, filter.filter(new Text(yahooSite)));
}
private void regexRowColumnTests(RowFilterInterface filter) {
private void regexRowColumnTests(RowFilterInterface filter)
throws UnsupportedEncodingException {
for (char c = FIRST_CHAR; c <= LAST_CHAR; c++) {
Text t = createRow(c);
for (Map.Entry<Text, byte []> e: this.colvalues.entrySet()) {
@ -129,7 +140,7 @@ public class TestRegExpRowFilter extends TestCase {
// Do same but with bad bytes.
assertTrue("Failed with character " + c,
filter.filter(r, col, "badbytes".getBytes()));
filter.filter(r, col, "badbytes".getBytes(HConstants.UTF8_ENCODING)));
// Do with good bytes but bad column name. Should not filter out.
assertFalse("Failed with character " + c,
@ -175,7 +186,7 @@ public class TestRegExpRowFilter extends TestCase {
// that maps to a null value.
// Testing row with columnKeys: a-e, e maps to null
colvalues.put(new Text(new String(new char[] { LAST_CHAR })),
HConstants.DELETE_BYTES.get());
HGlobals.deleteBytes.get());
assertFalse("Failed with last columnKey " + LAST_CHAR + " mapping to null.",
filter.filterNotNull(colvalues));
}

View File

@ -23,10 +23,12 @@ import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.UnsupportedEncodingException;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeMap;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.io.Text;
@ -42,10 +44,19 @@ public class TestRowFilterSet extends TestCase {
static final int MAX_PAGES = 5;
final char FIRST_CHAR = 'a';
final char LAST_CHAR = 'e';
final byte[] GOOD_BYTES = "abc".getBytes();
final byte[] BAD_BYTES = "def".getBytes();
TreeMap<Text, byte[]> colvalues;
static byte[] GOOD_BYTES = null;
static byte[] BAD_BYTES = null;
static {
try {
GOOD_BYTES = "abc".getBytes(HConstants.UTF8_ENCODING);
BAD_BYTES = "def".getBytes(HConstants.UTF8_ENCODING);
} catch (UnsupportedEncodingException e) {
fail();
}
}
/** {@inheritDoc} */
@Override
protected void setUp() throws Exception {

View File

@ -48,6 +48,8 @@
*/
package org.onelab.test;
import java.io.UnsupportedEncodingException;
import org.apache.hadoop.hbase.HConstants;
import org.onelab.filter.Key;
/**
@ -70,9 +72,10 @@ public class StringKey extends Key {
* Construct a Key using the specified String and default weight
*
* @param key String key value
* @throws UnsupportedEncodingException
*/
public StringKey(String key){
super(key.getBytes());
public StringKey(String key) throws UnsupportedEncodingException {
super(key.getBytes(HConstants.UTF8_ENCODING));
}
/**
@ -80,9 +83,12 @@ public class StringKey extends Key {
*
* @param key - String key value
* @param weight key weight
* @throws UnsupportedEncodingException
*/
public StringKey(String key, double weight){
super(key.getBytes(), weight);
public StringKey(String key, double weight)
throws UnsupportedEncodingException {
super(key.getBytes(HConstants.UTF8_ENCODING), weight);
}
}

View File

@ -48,6 +48,7 @@
*/
package org.onelab.test;
import java.io.UnsupportedEncodingException;
import junit.framework.TestCase;
import org.onelab.filter.*;
@ -61,8 +62,10 @@ import org.onelab.filter.*;
*/
public class TestFilter extends TestCase {
/** Test a BloomFilter */
public void testBloomFilter() {
/** Test a BloomFilter
* @throws UnsupportedEncodingException
*/
public void testBloomFilter() throws UnsupportedEncodingException {
Filter bf = new BloomFilter(8, 2);
Key key = new StringKey("toto");
Key k2 = new StringKey("lulu");
@ -76,8 +79,10 @@ public class TestFilter extends TestCase {
assertTrue(bf.membershipTest(new StringKey("abcd"))); // False positive
}
/** Test a CountingBloomFilter */
public void testCountingBloomFilter() {
/** Test a CountingBloomFilter
* @throws UnsupportedEncodingException
*/
public void testCountingBloomFilter() throws UnsupportedEncodingException {
Filter bf = new CountingBloomFilter(8, 2);
Key key = new StringKey("toto");
Key k2 = new StringKey("lulu");
@ -91,8 +96,10 @@ public class TestFilter extends TestCase {
assertTrue(bf.membershipTest(new StringKey("abcd"))); // False positive
}
/** Test a DynamicBloomFilter */
public void testDynamicBloomFilter() {
/** Test a DynamicBloomFilter
* @throws UnsupportedEncodingException
*/
public void testDynamicBloomFilter() throws UnsupportedEncodingException {
Filter bf = new DynamicBloomFilter(8, 2, 2);
Key key = new StringKey("toto");
Key k2 = new StringKey("lulu");