diff --git a/CHANGES.txt b/CHANGES.txt index e6b71c927ce..2a59484f80e 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -377,6 +377,7 @@ Release 0.92.0 - Unreleased HBASE-4621 TestAvroServer fails quite often intermittently (Akash Ashok) HBASE-4378 [hbck] Does not complain about regions with startkey==endkey. (Jonathan Hsieh) + HBASE-4459 HbaseObjectWritable code is a byte, we will eventually run out of codes TESTS HBASE-4450 test for number of blocks read: to serve as baseline for expected diff --git a/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java b/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java index 73590315934..03b19c441b3 100644 --- a/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java +++ b/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java @@ -93,6 +93,7 @@ import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableFactories; +import org.apache.hadoop.io.WritableUtils; /** * This is a customized version of the polymorphic hadoop @@ -115,15 +116,15 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur // Here we maintain two static maps of classes to code and vice versa. // Add new classes+codes as wanted or figure way to auto-generate these // maps from the HMasterInterface. - static final Map> CODE_TO_CLASS = - new HashMap>(); - static final Map, Byte> CLASS_TO_CODE = - new HashMap, Byte>(); + static final Map> CODE_TO_CLASS = + new HashMap>(); + static final Map, Integer> CLASS_TO_CODE = + new HashMap, Integer>(); // Special code that means 'not-encoded'; in this case we do old school // sending of the class name using reflection, etc. private static final byte NOT_ENCODED = 0; static { - byte code = NOT_ENCODED + 1; + int code = NOT_ENCODED + 1; // Primitive types. 
addToMap(Boolean.TYPE, code++); addToMap(Byte.TYPE, code++); @@ -240,7 +241,6 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur addToMap(RegionOpeningState.class, code++); addToMap(Append.class, code++); - } private Class declaredClass; @@ -320,7 +320,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur } public void readFields(DataInput in) throws IOException { - this.declaredClass = CODE_TO_CLASS.get(in.readByte()); + this.declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in)); } public void write(DataOutput out) throws IOException { @@ -336,7 +336,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur */ static void writeClassCode(final DataOutput out, final Class c) throws IOException { - Byte code = CLASS_TO_CODE.get(c); + Integer code = CLASS_TO_CODE.get(c); if (code == null ) { if ( List.class.isAssignableFrom(c)) { code = CLASS_TO_CODE.get(List.class); @@ -354,11 +354,9 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur for(StackTraceElement elem : els) { LOG.error(elem.getMethodName()); } -// new Exception().getStackTrace()[0].getMethodName()); -// throw new IOException(new Exception().getStackTrace()[0].getMethodName()); throw new UnsupportedOperationException("No code for unexpected " + c); } - out.writeByte(code); + WritableUtils.writeVInt(out, code); } @@ -455,7 +453,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur Text.writeString(out, ((Enum)instanceObj).name()); } else if (Writable.class.isAssignableFrom(declClass)) { // Writable Class c = instanceObj.getClass(); - Byte code = CLASS_TO_CODE.get(c); + Integer code = CLASS_TO_CODE.get(c); if (code == null) { out.writeByte(NOT_ENCODED); Text.writeString(out, c.getName()); @@ -465,7 +463,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur ((Writable)instanceObj).write(out); } else if 
(Serializable.class.isAssignableFrom(declClass)) { Class c = instanceObj.getClass(); - Byte code = CLASS_TO_CODE.get(c); + Integer code = CLASS_TO_CODE.get(c); if (code == null) { out.writeByte(NOT_ENCODED); Text.writeString(out, c.getName()); @@ -517,7 +515,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur public static Object readObject(DataInput in, HbaseObjectWritable objectWritable, Configuration conf) throws IOException { - Class declaredClass = CODE_TO_CLASS.get(in.readByte()); + Class declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in)); Object instance; if (declaredClass.isPrimitive()) { // primitive types if (declaredClass == Boolean.TYPE) { // boolean @@ -553,7 +551,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur Array.set(instance, i, readObject(in, conf)); } } - } else if (List.class.isAssignableFrom(declaredClass)) { // List + } else if (List.class.isAssignableFrom(declaredClass)) { // List int length = in.readInt(); instance = new ArrayList(length); for (int i = 0; i < length; i++) { @@ -566,8 +564,8 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur Text.readString(in)); } else { // Writable or Serializable Class instanceClass = null; - Byte b = in.readByte(); - if (b.byteValue() == NOT_ENCODED) { + int b = (byte)WritableUtils.readVInt(in); + if (b == NOT_ENCODED) { String className = Text.readString(in); try { instanceClass = getClassByName(conf, className); @@ -630,7 +628,7 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur return Class.forName(className, true, cl); } - private static void addToMap(final Class clazz, final byte code) { + private static void addToMap(final Class clazz, final int code) { CLASS_TO_CODE.put(clazz, code); CODE_TO_CLASS.put(code, clazz); } diff --git a/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java 
b/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java index 6bce5cd3e94..d9ff5d3678a 100644 --- a/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java +++ b/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterBase; import org.apache.hadoop.hbase.filter.FilterList; import org.apache.hadoop.hbase.filter.PrefixFilter; +import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparator; @@ -49,6 +50,54 @@ public class TestHbaseObjectWritable extends TestCase { super.tearDown(); } + @SuppressWarnings("boxing") + public void testReadOldObjectDataInput() throws IOException { + Configuration conf = HBaseConfiguration.create(); + /* + * This is the code used to generate byte[] where + * HbaseObjectWritable used byte for code + + ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); + DataOutputStream out = new DataOutputStream(byteStream); + HbaseObjectWritable.writeObject(out, bytes, byte[].class, conf); + byte[] ba = byteStream.toByteArray(); + out.close(); + */ + + /* + * byte array generated by the following call + * HbaseObjectWritable.writeObject(out, new Text("Old"), Text.class, conf); + */ + byte[] baForText = {13, 13, 3, 79, 108, 100}; + Text txt = (Text)readByteArray(conf, baForText); + Text oldTxt = new Text("Old"); + assertEquals(txt, oldTxt); + + final byte A = 'A'; + byte [] bytes = new byte[1]; + bytes[0] = A; + /* + * byte array generated by the following call + * HbaseObjectWritable.writeObject(out, bytes, byte[].class, conf); + */ + byte[] baForByteArray = { 11, 1, 65 }; + byte[] baOut = (byte[])readByteArray(conf, baForByteArray); + assertTrue(Bytes.equals(baOut, bytes)); + } + + /* + * helper method which reads byte array using HbaseObjectWritable.readObject() + */ + private Object 
readByteArray(final Configuration conf, final byte[] ba) + throws IOException { + ByteArrayInputStream bais = + new ByteArrayInputStream(ba); + DataInputStream dis = new DataInputStream(bais); + Object product = HbaseObjectWritable.readObject(dis, conf); + dis.close(); + return product; + } + @SuppressWarnings("boxing") public void testReadObjectDataInputConfiguration() throws IOException { Configuration conf = HBaseConfiguration.create(); @@ -212,4 +261,4 @@ public class TestHbaseObjectWritable extends TestCase { this.key = Text.readString(in); } } -} \ No newline at end of file +}