HBASE-4459 HbaseObjectWritable code is a byte, we will eventually run out of codes

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1186975 13f79535-47bb-0310-9956-ffa450edef68
ramkrishna 2011-10-20 18:19:44 +00:00
parent dd6460d183
commit 8a0699e585
3 changed files with 67 additions and 19 deletions

CHANGES.txt

@@ -377,6 +377,7 @@ Release 0.92.0 - Unreleased
    HBASE-4621 TestAvroServer fails quite often intermittently (Akash Ashok)
    HBASE-4378 [hbck] Does not complain about regions with startkey==endkey.
               (Jonathan Hsieh)
+   HBASE-4459 HbaseObjectWritable code is a byte, we will eventually run out of codes
 
   TESTS
    HBASE-4450 test for number of blocks read: to serve as baseline for expected

HbaseObjectWritable.java

@@ -93,6 +93,7 @@ import org.apache.hadoop.io.ObjectWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
+import org.apache.hadoop.io.WritableUtils;
 
 /**
  * This is a customized version of the polymorphic hadoop
@@ -115,15 +116,15 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur
   // Here we maintain two static maps of classes to code and vice versa.
   // Add new classes+codes as wanted or figure way to auto-generate these
   // maps from the HMasterInterface.
-  static final Map<Byte, Class<?>> CODE_TO_CLASS =
-    new HashMap<Byte, Class<?>>();
-  static final Map<Class<?>, Byte> CLASS_TO_CODE =
-    new HashMap<Class<?>, Byte>();
+  static final Map<Integer, Class<?>> CODE_TO_CLASS =
+    new HashMap<Integer, Class<?>>();
+  static final Map<Class<?>, Integer> CLASS_TO_CODE =
+    new HashMap<Class<?>, Integer>();
   // Special code that means 'not-encoded'; in this case we do old school
   // sending of the class name using reflection, etc.
   private static final byte NOT_ENCODED = 0;
   static {
-    byte code = NOT_ENCODED + 1;
+    int code = NOT_ENCODED + 1;
     // Primitive types.
     addToMap(Boolean.TYPE, code++);
     addToMap(Byte.TYPE, code++);
@@ -240,7 +241,6 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur
     addToMap(RegionOpeningState.class, code++);
     addToMap(Append.class, code++);
   }
 
   private Class<?> declaredClass;
@@ -320,7 +320,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur
   }
 
   public void readFields(DataInput in) throws IOException {
-    this.declaredClass = CODE_TO_CLASS.get(in.readByte());
+    this.declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
   }
 
   public void write(DataOutput out) throws IOException {
@@ -336,7 +336,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur
    */
  static void writeClassCode(final DataOutput out, final Class<?> c)
      throws IOException {
-    Byte code = CLASS_TO_CODE.get(c);
+    Integer code = CLASS_TO_CODE.get(c);
    if (code == null ) {
      if ( List.class.isAssignableFrom(c)) {
        code = CLASS_TO_CODE.get(List.class);
@@ -354,11 +354,9 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur
      for(StackTraceElement elem : els) {
        LOG.error(elem.getMethodName());
      }
-      // new Exception().getStackTrace()[0].getMethodName());
-      // throw new IOException(new Exception().getStackTrace()[0].getMethodName());
      throw new UnsupportedOperationException("No code for unexpected " + c);
    }
-    out.writeByte(code);
+    WritableUtils.writeVInt(out, code);
  }
@@ -455,7 +453,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur
      Text.writeString(out, ((Enum)instanceObj).name());
    } else if (Writable.class.isAssignableFrom(declClass)) { // Writable
      Class <?> c = instanceObj.getClass();
-      Byte code = CLASS_TO_CODE.get(c);
+      Integer code = CLASS_TO_CODE.get(c);
      if (code == null) {
        out.writeByte(NOT_ENCODED);
        Text.writeString(out, c.getName());
@@ -465,7 +463,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur
      ((Writable)instanceObj).write(out);
    } else if (Serializable.class.isAssignableFrom(declClass)) {
      Class <?> c = instanceObj.getClass();
-      Byte code = CLASS_TO_CODE.get(c);
+      Integer code = CLASS_TO_CODE.get(c);
      if (code == null) {
        out.writeByte(NOT_ENCODED);
        Text.writeString(out, c.getName());
@@ -517,7 +515,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur
  public static Object readObject(DataInput in,
      HbaseObjectWritable objectWritable, Configuration conf)
      throws IOException {
-    Class<?> declaredClass = CODE_TO_CLASS.get(in.readByte());
+    Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
    Object instance;
    if (declaredClass.isPrimitive()) { // primitive types
      if (declaredClass == Boolean.TYPE) { // boolean
@@ -566,8 +564,8 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur
        Text.readString(in));
    } else { // Writable or Serializable
      Class instanceClass = null;
-      Byte b = in.readByte();
-      if (b.byteValue() == NOT_ENCODED) {
+      int b = (byte)WritableUtils.readVInt(in);
+      if (b == NOT_ENCODED) {
        String className = Text.readString(in);
        try {
          instanceClass = getClassByName(conf, className);
@@ -630,7 +628,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur
    return Class.forName(className, true, cl);
  }
 
-  private static void addToMap(final Class<?> clazz, final byte code) {
+  private static void addToMap(final Class<?> clazz, final int code) {
    CLASS_TO_CODE.put(clazz, code);
    CODE_TO_CLASS.put(code, clazz);
  }
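Note (not part of the commit): replacing out.writeByte(code) with WritableUtils.writeVInt(out, code) stays wire-compatible for all of the codes that already existed, because Hadoop's vint encoding stores any value in the range -112..127 as a single byte equal to the value itself. The sketch below illustrates that assumption by writing an old-style single-byte code and reading it back through the new vint path; the class name and the example code value 13 are illustrative only.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.WritableUtils;

public class VIntCompatSketch {
  public static void main(String[] args) throws IOException {
    // Old writer behaviour: the class code goes out as one raw byte.
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bos);
    out.writeByte(13);   // hypothetical pre-patch class code
    out.close();

    // New reader behaviour: decode the same stream as a vint. A single byte
    // in the range -112..127 decodes to its own value, so streams written
    // before this patch read back unchanged.
    DataInputStream in =
        new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
    int code = WritableUtils.readVInt(in);
    System.out.println(code);   // prints 13
  }
}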

TestHbaseObjectWritable.java

@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterBase;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparator;
@@ -49,6 +50,54 @@ public class TestHbaseObjectWritable extends TestCase {
     super.tearDown();
   }
 
+  @SuppressWarnings("boxing")
+  public void testReadOldObjectDataInput() throws IOException {
+    Configuration conf = HBaseConfiguration.create();
+    /*
+     * This is the code used to generate the byte[] fixtures below when
+     * HbaseObjectWritable still used a byte for the class code:
+     *
+     *   ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
+     *   DataOutputStream out = new DataOutputStream(byteStream);
+     *   HbaseObjectWritable.writeObject(out, bytes, byte[].class, conf);
+     *   byte[] ba = byteStream.toByteArray();
+     *   out.close();
+     */
+
+    /*
+     * byte array generated by the following call
+     * HbaseObjectWritable.writeObject(out, new Text("Old"), Text.class, conf);
+     */
+    byte[] baForText = {13, 13, 3, 79, 108, 100};
+    Text txt = (Text)readByteArray(conf, baForText);
+    Text oldTxt = new Text("Old");
+    assertEquals(txt, oldTxt);
+
+    final byte A = 'A';
+    byte[] bytes = new byte[1];
+    bytes[0] = A;
+    /*
+     * byte array generated by the following call
+     * HbaseObjectWritable.writeObject(out, bytes, byte[].class, conf);
+     */
+    byte[] baForByteArray = { 11, 1, 65 };
+    byte[] baOut = (byte[])readByteArray(conf, baForByteArray);
+    assertTrue(Bytes.equals(baOut, bytes));
+  }
+
+  /*
+   * Helper that reads a serialized object back using
+   * HbaseObjectWritable.readObject().
+   */
+  private Object readByteArray(final Configuration conf, final byte[] ba)
+      throws IOException {
+    ByteArrayInputStream bais = new ByteArrayInputStream(ba);
+    DataInputStream dis = new DataInputStream(bais);
+    Object product = HbaseObjectWritable.readObject(dis, conf);
+    dis.close();
+    return product;
+  }
+
   @SuppressWarnings("boxing")
   public void testReadObjectDataInputConfiguration() throws IOException {
     Configuration conf = HBaseConfiguration.create();
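For reference, a minimal round trip through the patched writer and reader would look like the sketch below. It is not part of the commit; it assumes it runs inside the same test class as above (so conf, the JUnit asserts, and the java.io stream imports are already in scope) and uses only the HbaseObjectWritable.writeObject(...) and readObject(...) signatures already exercised by the test.

    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(byteStream);
    // Writes the vint-encoded class code followed by the Text payload.
    HbaseObjectWritable.writeObject(out, new Text("New"), Text.class, conf);
    out.close();

    DataInputStream in =
        new DataInputStream(new ByteArrayInputStream(byteStream.toByteArray()));
    Text roundTripped = (Text) HbaseObjectWritable.readObject(in, conf);
    assertEquals(new Text("New"), roundTripped);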