HBASE-6919 Remove unnecessary throws IOException from Bytes.readVLong.

Added readAsVLong() to deprecate readVLong(), which declared a throws IOException it never needed. Added a test for readAsVLong().

Signed-off-by: Sean Busbey <busbey@apache.org>
Apekshit (Appy) Sharma authored on 2015-04-03 17:23:27 -07:00; committed by Sean Busbey
parent 6c22333599
commit e252c30645
3 changed files with 34 additions and 16 deletions
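
For downstream callers, the practical effect of the change is that the checked IOException (and the wrapping try/catch it forced) around Bytes.readVLong can be dropped. A minimal before/after sketch of a caller migrating to the new method; the VLongMigrationSketch class and the decodeOld/decodeNew names are illustrative, not part of this patch:

import java.io.IOException;

import org.apache.hadoop.hbase.util.Bytes;

public class VLongMigrationSketch {

  // Before this patch: readVLong declared throws IOException, so callers had to
  // wrap the call or propagate a checked exception that was never actually thrown.
  static long decodeOld(byte[] buf, int offset) {
    try {
      return Bytes.readVLong(buf, offset); // deprecated by this change
    } catch (IOException e) {
      throw new RuntimeException("Error reading vlong", e); // boilerplate the patch removes
    }
  }

  // After this patch: readAsVLong decodes the same zero-compressed format
  // without the checked exception.
  static long decodeNew(byte[] buf, int offset) {
    return Bytes.readAsVLong(buf, offset);
  }
}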

@@ -1364,14 +1364,26 @@ public class Bytes implements Comparable<Bytes> {
   }

   /**
-   * Reads a zero-compressed encoded long from input stream and returns it.
+   * Reads a zero-compressed encoded long from input buffer and returns it.
    * @param buffer Binary array
    * @param offset Offset into array at which vint begins.
    * @throws java.io.IOException e
-   * @return deserialized long from stream.
+   * @return deserialized long from buffer.
+   * @deprecated Use {@link #readAsVLong()} instead.
    */
+  @Deprecated
   public static long readVLong(final byte [] buffer, final int offset)
   throws IOException {
+    return readAsVLong(buffer, offset);
+  }
+
+  /**
+   * Reads a zero-compressed encoded long from input buffer and returns it.
+   * @param buffer Binary array
+   * @param offset Offset into array at which vint begins.
+   * @return deserialized long from buffer.
+   */
+  public static long readAsVLong(final byte [] buffer, final int offset) {
     byte firstByte = buffer[offset];
     int len = WritableUtils.decodeVIntSize(firstByte);
     if (len == 1) {
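
Because readAsVLong returns only the decoded value, callers that need to step over the encoded bytes pair it with WritableUtils.getVIntSize, exactly as the HFile reader below does. A small self-contained sketch of that pattern, assuming the standard Hadoop WritableUtils encoder; the ReadAsVLongSketch class and the sample values are illustrative:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.WritableUtils;

public class ReadAsVLongSketch {
  public static void main(String[] args) throws IOException {
    // Encode a few vlongs back to back in the zero-compressed Writable format.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    long[] values = { 7L, -3L, Long.MAX_VALUE };
    for (long v : values) {
      WritableUtils.writeVLong(out, v);
    }
    byte[] encoded = baos.toByteArray();

    // Decode them in place: readAsVLong yields the value, getVIntSize reports how
    // many bytes that value occupied, so the offset can be advanced past it.
    int offset = 0;
    for (long expected : values) {
      long decoded = Bytes.readAsVLong(encoded, offset);
      offset += WritableUtils.getVIntSize(decoded);
      assert decoded == expected;
    }
  }
}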

@@ -31,6 +31,7 @@ import junit.framework.TestCase;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.io.WritableUtils;
+import org.junit.Assert;
 import org.junit.experimental.categories.Category;
@@ -213,6 +214,19 @@ public class TestBytes extends TestCase {
     assertEquals(7, target.limit());
   }

+  public void testReadAsVLong() throws Exception {
+    long [] longs = {-1l, 123l, Long.MIN_VALUE, Long.MAX_VALUE};
+    for (int i = 0; i < longs.length; i++) {
+      ByteArrayOutputStream baos = new ByteArrayOutputStream();
+      DataOutputStream output = new DataOutputStream(baos);
+      WritableUtils.writeVLong(output, longs[i]);
+      byte[] long_bytes_no_offset = baos.toByteArray();
+      assertEquals(longs[i], Bytes.readAsVLong(long_bytes_no_offset, 0));
+      byte[] long_bytes_with_offset = bytesWithOffset(long_bytes_no_offset);
+      assertEquals(longs[i], Bytes.readAsVLong(long_bytes_with_offset, 1));
+    }
+  }
+
   public void testToStringBinaryForBytes() {
     byte[] array = { '0', '9', 'a', 'z', 'A', 'Z', '@', 1 };
     String actual = Bytes.toStringBinary(array);
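
The test relies on a bytesWithOffset helper defined elsewhere in TestBytes and not shown in this hunk. Its real body may differ; a minimal sketch of what such a helper presumably does is to shift the payload by one pad byte so the non-zero-offset path of readAsVLong gets exercised:

  // Hypothetical reconstruction; the actual helper lives outside this hunk.
  private byte[] bytesWithOffset(byte[] src) {
    byte[] result = new byte[src.length + 1];
    result[0] = (byte) 0xAA; // arbitrary pad byte occupying offset 0
    System.arraycopy(src, 0, result, 1, src.length);
    return result;
  }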

@@ -585,13 +585,9 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
       }
       if (this.reader.shouldIncludeMemstoreTS()) {
         if (this.reader.isDecodeMemstoreTS()) {
-          try {
-            memstoreTS = Bytes.readVLong(blockBuffer.array(), blockBuffer.arrayOffset()
-                + blockBuffer.position());
-            memstoreTSLen = WritableUtils.getVIntSize(memstoreTS);
-          } catch (Exception e) {
-            throw new RuntimeException("Error reading memstore timestamp", e);
-          }
+          memstoreTS = Bytes.readAsVLong(blockBuffer.array(), blockBuffer.arrayOffset()
+              + blockBuffer.position());
+          memstoreTSLen = WritableUtils.getVIntSize(memstoreTS);
         } else {
           memstoreTS = 0;
           memstoreTSLen = 1;
@@ -973,13 +969,9 @@ public class HFileReaderImpl implements HFile.Reader, Configurable {
     protected void readMvccVersion() {
       if (this.reader.shouldIncludeMemstoreTS()) {
         if (this.reader.isDecodeMemstoreTS()) {
-          try {
-            currMemstoreTS = Bytes.readVLong(blockBuffer.array(), blockBuffer.arrayOffset()
-                + blockBuffer.position());
-            currMemstoreTSLen = WritableUtils.getVIntSize(currMemstoreTS);
-          } catch (Exception e) {
-            throw new RuntimeException("Error reading memstore timestamp", e);
-          }
+          currMemstoreTS = Bytes.readAsVLong(blockBuffer.array(), blockBuffer.arrayOffset()
+              + blockBuffer.position());
+          currMemstoreTSLen = WritableUtils.getVIntSize(currMemstoreTS);
         } else {
           currMemstoreTS = 0;
           currMemstoreTSLen = 1;
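
Both call sites read the memstore timestamp straight out of the scanner's heap ByteBuffer by indexing the backing array at arrayOffset() + position(); with readAsVLong the surrounding try/catch disappears. A hedged standalone sketch of that access pattern, where the buffer setup is illustrative rather than HFileReaderImpl's actual state:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.WritableUtils;

public class MemstoreTsReadSketch {
  public static void main(String[] args) throws IOException {
    // Stand-in for a block buffer whose current position sits on an encoded vlong.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    WritableUtils.writeVLong(out, 42L);
    ByteBuffer blockBuffer = ByteBuffer.wrap(baos.toByteArray());

    // Same index arithmetic as the reader: the absolute index into the backing
    // array is arrayOffset() + position(); no IOException handling is needed.
    long memstoreTS = Bytes.readAsVLong(blockBuffer.array(),
        blockBuffer.arrayOffset() + blockBuffer.position());
    int memstoreTSLen = WritableUtils.getVIntSize(memstoreTS);
    System.out.println("memstoreTS=" + memstoreTS + " (" + memstoreTSLen + " bytes)");
  }
}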