HBASE-6226 move DataBlockEncoding and related classes to hbase-common module

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1356590 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2012-07-03 07:34:24 +00:00
parent b6eabd741d
commit 02cd5297ee
9 changed files with 42 additions and 30 deletions

DataBlockEncoding.java

@@ -18,9 +18,7 @@ package org.apache.hadoop.hbase.io.encoding;
 
 import java.io.IOException;
 import java.io.OutputStream;
-import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -38,9 +36,9 @@ public enum DataBlockEncoding {
   /** Disable data block encoding. */
   NONE(0, null),
   // id 1 is reserved for the BITSET algorithm to be added later
-  PREFIX(2, new PrefixKeyDeltaEncoder()),
-  DIFF(3, new DiffKeyDeltaEncoder()),
-  FAST_DIFF(4, new FastDiffDeltaEncoder());
+  PREFIX(2, createEncoder("org.apache.hadoop.hbase.io.encoding.PrefixKeyDeltaEncoder")),
+  DIFF(3, createEncoder("org.apache.hadoop.hbase.io.encoding.DiffKeyDeltaEncoder")),
+  FAST_DIFF(4, createEncoder("org.apache.hadoop.hbase.io.encoding.FastDiffDeltaEncoder"));
 
   private final short id;
   private final byte[] idInBytes;
@@ -172,4 +170,16 @@ public enum DataBlockEncoding {
     return idToEncoding.get(dataBlockEncodingId);
   }
 
+  protected static DataBlockEncoder createEncoder(String fullyQualifiedClassName) {
+    try {
+      return (DataBlockEncoder) Class.forName(fullyQualifiedClassName).newInstance();
+    } catch (InstantiationException e) {
+      throw new RuntimeException(e);
+    } catch (IllegalAccessException e) {
+      throw new RuntimeException(e);
+    } catch (ClassNotFoundException e) {
+      throw new IllegalArgumentException(e);
+    }
+  }
 }
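
Loading the encoders reflectively by class name, rather than with a direct
"new", removes the compile-time dependency that would otherwise keep
DataBlockEncoding out of hbase-common. A minimal sketch of the same pattern,
using hypothetical Codec and SampleCodec names rather than real HBase classes:

    // Illustrative only: the interface lives in a "common" module, while the
    // implementation class is referenced purely by its string name, so the
    // common module never compiles against it.
    interface Codec {
      byte[] encode(byte[] data);
    }

    enum CodecRegistry {
      SAMPLE(createCodec("com.example.SampleCodec")); // hypothetical class name

      private final Codec codec;

      CodecRegistry(Codec codec) {
        this.codec = codec;
      }

      static Codec createCodec(String fullyQualifiedClassName) {
        try {
          // Class.forName resolves the implementation at runtime; if the
          // implementing module is absent, this fails here rather than at
          // compile time.
          return (Codec) Class.forName(fullyQualifiedClassName).newInstance();
        } catch (InstantiationException e) {
          throw new RuntimeException(e);
        } catch (IllegalAccessException e) {
          throw new RuntimeException(e);
        } catch (ClassNotFoundException e) {
          throw new IllegalArgumentException(e);
        }
      }
    }

One trade-off of this choice is that a typo in a class name only surfaces when
the enum is first loaded, which is presumably why createEncoder wraps
ClassNotFoundException in an IllegalArgumentException.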

HFileBlockDecodingContext.java

@@ -17,9 +17,9 @@
 package org.apache.hadoop.hbase.io.encoding;
 
 import java.io.IOException;
+import java.nio.ByteBuffer;
 
 import org.apache.hadoop.hbase.io.hfile.Compression;
-import org.apache.hadoop.hbase.io.hfile.HFileBlock;
 
 /**
  * A decoding context that is created by a reader's encoder, and is shared
@@ -35,16 +35,19 @@ public interface HFileBlockDecodingContext {
   public Compression.Algorithm getCompression();
 
   /**
-   * Perform all actions that need to be done before the encoder's real
-   * decoding process. Decompression needs to be done if
-   * {@link #getCompression()} returns a valid compression algorithm.
+   * Perform all actions that need to be done before the encoder's real decoding process.
+   * Decompression needs to be done if {@link #getCompression()} returns a valid compression
+   * algorithm.
    *
-   * @param block HFile block object
+   * @param onDiskSizeWithoutHeader numBytes after block and encoding headers
+   * @param uncompressedSizeWithoutHeader numBytes without header required to store the block after
+   *          decompressing (not decoding)
+   * @param blockBufferWithoutHeader ByteBuffer pointed after the header but before the data
    * @param onDiskBlock on disk bytes to be decoded
    * @param offset data start offset in onDiskBlock
    * @throws IOException
    */
-  public void prepareDecoding(HFileBlock block, byte[] onDiskBlock,
-      int offset) throws IOException;
+  public void prepareDecoding(int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,
+      ByteBuffer blockBufferWithoutHeader, byte[] onDiskBlock, int offset) throws IOException;
 }
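
Replacing the HFileBlock parameter with primitives and a ByteBuffer is what
breaks the interface's dependency on the block implementation, letting it move
to hbase-common. A hedged sketch of what an implementer now has to provide;
PassThroughDecodingContext is an illustrative name, not a class from this
commit:

    import java.io.IOException;
    import java.nio.ByteBuffer;

    import org.apache.hadoop.hbase.io.hfile.Compression;

    // Illustrative implementation for the uncompressed case: since
    // getCompression() reports NONE, "decoding preparation" reduces to copying
    // the on-disk bytes into the destination buffer.
    public class PassThroughDecodingContext implements HFileBlockDecodingContext {

      @Override
      public Compression.Algorithm getCompression() {
        return Compression.Algorithm.NONE;
      }

      @Override
      public void prepareDecoding(int onDiskSizeWithoutHeader,
          int uncompressedSizeWithoutHeader, ByteBuffer blockBufferWithoutHeader,
          byte[] onDiskBlock, int offset) throws IOException {
        // Without compression, on-disk and in-memory payload sizes must match.
        if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader) {
          throw new IOException("Sizes differ but no compression is configured");
        }
        blockBufferWithoutHeader.put(onDiskBlock, offset, onDiskSizeWithoutHeader);
      }
    }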


@@ -1,5 +1,5 @@
 /*
- * Copyright 2011 The Apache Software Foundation
+ * Copyright The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file

Compression.java

@@ -172,8 +172,7 @@ public final class Compression {
         try {
           Class<?> externalCodec =
               getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.SnappyCodec");
-          snappyCodec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec,
-              conf);
+          snappyCodec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf);
         } catch (ClassNotFoundException e) {
           throw new RuntimeException(e);
         }
@@ -191,8 +190,7 @@ public final class Compression {
         try {
           Class<?> externalCodec =
               getClassLoaderForCodec().loadClass("org.apache.hadoop.io.compress.Lz4Codec");
-          lz4Codec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec,
-              conf);
+          lz4Codec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf);
         } catch (ClassNotFoundException e) {
           throw new RuntimeException(e);
         }
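
Both hunks tidy the same pattern: the Snappy and LZ4 codecs are loaded lazily
and reflectively, so HBase compiles without them and only fails at runtime if a
missing codec is actually requested. A condensed sketch of that pattern, with a
hypothetical cachedCodec field and getCodec method standing in for
Compression.java's per-algorithm fields (assumes the usual Hadoop imports:
Configuration, CompressionCodec, ReflectionUtils):

    // Hypothetical condensation of the lazy-loading logic; error handling and
    // configuration reuse are simplified relative to Compression.java.
    private static volatile CompressionCodec cachedCodec;

    static CompressionCodec getCodec(String className, Configuration conf) {
      if (cachedCodec == null) {
        try {
          // Loaded by name so the codec jar is optional at compile time.
          Class<?> externalCodec =
              Compression.class.getClassLoader().loadClass(className);
          cachedCodec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf);
        } catch (ClassNotFoundException e) {
          throw new RuntimeException(e);
        }
      }
      return cachedCodec;
    }

A caller would pass the codec's class name, for example
getCodec("org.apache.hadoop.io.compress.SnappyCodec", conf).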

HFileBlockDefaultDecodingContext.java

@@ -44,17 +44,14 @@ public class HFileBlockDefaultDecodingContext implements
   }
 
   @Override
-  public void prepareDecoding(HFileBlock block,
-      byte[] onDiskBlock, int offset) throws IOException {
-    DataInputStream dis =
-        new DataInputStream(new ByteArrayInputStream(
-            onDiskBlock, offset,
-            block.getOnDiskSizeWithoutHeader()));
+  public void prepareDecoding(int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,
+      ByteBuffer blockBufferWithoutHeader, byte[] onDiskBlock, int offset) throws IOException {
+    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(onDiskBlock, offset,
+        onDiskSizeWithoutHeader));
 
-    ByteBuffer buffer = block.getBufferWithoutHeader();
-    Compression.decompress(buffer.array(), buffer.arrayOffset(),
-        (InputStream) dis, block.getOnDiskSizeWithoutHeader(),
-        block.getUncompressedSizeWithoutHeader(), compressAlgo);
+    Compression.decompress(blockBufferWithoutHeader.array(),
+        blockBufferWithoutHeader.arrayOffset(), (InputStream) dis, onDiskSizeWithoutHeader,
+        uncompressedSizeWithoutHeader, compressAlgo);
   }
 
   @Override
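
The rewritten method decompresses straight into the caller-supplied
blockBufferWithoutHeader's backing array, so no intermediate buffer is
allocated. A rough usage sketch, where decodeBlock is a hypothetical helper and
the sizes would normally come from the parsed block header:

    // Hedged sketch: decompress one block's payload through the context. The
    // context writes into the destination buffer via array()/arrayOffset(),
    // so the buffer must be heap-backed.
    static ByteBuffer decodeBlock(HFileBlockDecodingContext ctx, byte[] onDiskBlock,
        int headerSize, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader)
        throws IOException {
      ByteBuffer dest = ByteBuffer.allocate(uncompressedSizeWithoutHeader);
      ctx.prepareDecoding(onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader,
          dest, onDiskBlock, headerSize);
      return dest;
    }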

HFileBlock.java

@@ -1728,9 +1728,13 @@ public class HFileBlock extends SchemaConfigured implements Cacheable {
       // This will allocate a new buffer but keep header bytes.
       b.allocateBuffer(nextBlockOnDiskSize > 0);
       if (b.blockType.equals(BlockType.ENCODED_DATA)) {
-        encodedBlockDecodingCtx.prepareDecoding(b, onDiskBlock, hdrSize);
+        encodedBlockDecodingCtx.prepareDecoding(b.getOnDiskSizeWithoutHeader(),
+          b.getUncompressedSizeWithoutHeader(), b.getBufferWithoutHeader(), onDiskBlock,
+          hdrSize);
       } else {
-        defaultDecodingCtx.prepareDecoding(b, onDiskBlock, hdrSize);
+        defaultDecodingCtx.prepareDecoding(b.getOnDiskSizeWithoutHeader(),
+          b.getUncompressedSizeWithoutHeader(), b.getBufferWithoutHeader(), onDiskBlock,
+          hdrSize);
       }
       if (nextBlockOnDiskSize > 0) {
         // Copy next block's header bytes into the new block if we have them.
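
Both branches now unpack the same three values from the block, which keeps the
decoding contexts free of any HFileBlock dependency at the cost of some
duplication at the call site. One way the two calls could be folded together
(an illustrative refactor, not part of this commit):

    // Hypothetical private helper inside HFileBlock's reader code.
    private static void prepareDecoding(HFileBlockDecodingContext ctx, HFileBlock b,
        byte[] onDiskBlock, int hdrSize) throws IOException {
      ctx.prepareDecoding(b.getOnDiskSizeWithoutHeader(),
          b.getUncompressedSizeWithoutHeader(), b.getBufferWithoutHeader(),
          onDiskBlock, hdrSize);
    }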