HBASE-13003 - Get tests in TestHFileBlockIndex back (Zhangduo)
parent c3473eb2d7
commit 23a894fc4a

@@ -42,17 +42,18 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexChunk;
 import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex.BlockIndexReader;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -137,7 +138,7 @@ public class TestHFileBlockIndex {
     fs = HFileSystem.get(conf);
   }
 
-  protected void testBlockIndexInternals(boolean useTags) throws IOException {
+  private void testBlockIndexInternals(boolean useTags) throws IOException {
     path = new Path(TEST_UTIL.getDataTestDir(), "block_index_" + compr + useTags);
     writeWholeIndex(useTags);
     readIndex(useTags);
@@ -185,7 +186,7 @@ public class TestHFileBlockIndex {
     }
   }
 
-  public void readIndex(boolean useTags) throws IOException {
+  private void readIndex(boolean useTags) throws IOException {
     long fileSize = fs.getFileStatus(path).getLen();
     LOG.info("Size of " + path + ": " + fileSize);
 
@@ -215,10 +216,10 @@ public class TestHFileBlockIndex {
     for (byte[] key : keys) {
       assertTrue(key != null);
       assertTrue(indexReader != null);
-      HFileBlock b = indexReader.seekToDataBlock(new KeyValue.KeyOnlyKeyValue(key, 0, key.length),
-          null,
-          true, true, false, null);
-      if (Bytes.BYTES_RAWCOMPARATOR.compare(key, firstKeyInFile) < 0) {
+      HFileBlock b =
+          indexReader.seekToDataBlock(new KeyValue.KeyOnlyKeyValue(key, 0, key.length), null, true,
+            true, false, null);
+      if (KeyValue.COMPARATOR.compareFlatKey(key, firstKeyInFile) < 0) {
         assertTrue(b == null);
         ++i;
         continue;
@@ -259,21 +260,26 @@ public class TestHFileBlockIndex {
         new HFileBlockIndex.BlockIndexWriter(hbw, null, null);
 
     for (int i = 0; i < NUM_DATA_BLOCKS; ++i) {
-      hbw.startWriting(BlockType.DATA).write(
-          String.valueOf(rand.nextInt(1000)).getBytes());
+      hbw.startWriting(BlockType.DATA).write(String.valueOf(rand.nextInt(1000)).getBytes());
       long blockOffset = outputStream.getPos();
       hbw.writeHeaderAndData(outputStream);
 
       byte[] firstKey = null;
+      byte[] family = Bytes.toBytes("f");
+      byte[] qualifier = Bytes.toBytes("q");
       for (int j = 0; j < 16; ++j) {
-        byte[] k = TestHFileWriterV2.randomOrderedKey(rand, i * 16 + j);
+        byte[] k =
+            new KeyValue(TestHFileWriterV2.randomOrderedKey(rand, i * 16 + j), family, qualifier,
+                EnvironmentEdgeManager.currentTime(), KeyValue.Type.Put).getKey();
         keys.add(k);
-        if (j == 8)
+        if (j == 8) {
           firstKey = k;
+        }
       }
       assertTrue(firstKey != null);
-      if (firstKeyInFile == null)
+      if (firstKeyInFile == null) {
         firstKeyInFile = firstKey;
+      }
       biw.addEntry(firstKey, blockOffset, hbw.getOnDiskSizeWithHeader());
 
       writeInlineBlocks(hbw, outputStream, biw, false);
@@ -438,7 +444,7 @@ public class TestHFileBlockIndex {
 
   }
 
-  //@Test
+  @Test
   public void testBlockIndexChunk() throws IOException {
     BlockIndexChunk c = new BlockIndexChunk();
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -475,7 +481,7 @@ public class TestHFileBlockIndex {
   }
 
   /** Checks if the HeapSize calculator is within reason */
-  //@Test
+  @Test
   public void testHeapSizeForBlockIndex() throws IOException {
     Class<HFileBlockIndex.BlockIndexReader> cl =
         HFileBlockIndex.BlockIndexReader.class;
@@ -503,7 +509,7 @@ public class TestHFileBlockIndex {
    *
    * @throws IOException
    */
-  //@Test
+  @Test
   public void testHFileWriterAndReader() throws IOException {
     Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
         "hfile_for_block_index");
@@ -535,16 +541,19 @@ public class TestHFileBlockIndex {
         .withFileContext(meta)
         .create();
     Random rand = new Random(19231737);
-
+    byte[] family = Bytes.toBytes("f");
+    byte[] qualifier = Bytes.toBytes("q");
     for (int i = 0; i < NUM_KV; ++i) {
       byte[] row = TestHFileWriterV2.randomOrderedKey(rand, i);
 
       // Key will be interpreted by KeyValue.KEY_COMPARATOR
-      KeyValue kv = KeyValueUtil.createFirstOnRow(row, 0, row.length, row, 0, 0,
-          row, 0, 0);
+      KeyValue kv =
+          new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),
+              TestHFileWriterV2.randomValue(rand));
       byte[] k = kv.getKey();
       writer.append(kv);
       keys[i] = k;
+      values[i] = CellUtil.cloneValue(kv);
       keyStrSet.add(Bytes.toStringBinary(k));
       if (i > 0) {
         assertTrue(KeyValue.COMPARATOR.compareFlatKey(keys[i - 1],
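The recurring pattern in the hunks above is that bare row bytes are replaced by full flat keys built through KeyValue, and comparisons go through KeyValue.COMPARATOR.compareFlatKey rather than Bytes.BYTES_RAWCOMPARATOR. Below is a minimal standalone sketch of that pattern; the class name and sample rows are illustrative, not from the patch, and it assumes an HBase 1.x-era KeyValue on the classpath (the test itself uses EnvironmentEdgeManager.currentTime() where this sketch uses System.currentTimeMillis()).

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class FlatKeyOrderingSketch {
  public static void main(String[] args) {
    byte[] family = Bytes.toBytes("f");
    byte[] qualifier = Bytes.toBytes("q");

    // Build full flat keys (row + family + qualifier + timestamp + type),
    // the same shape the patched test feeds to the block index.
    byte[] k1 = new KeyValue(Bytes.toBytes("row-001"), family, qualifier,
        System.currentTimeMillis(), KeyValue.Type.Put).getKey();
    byte[] k2 = new KeyValue(Bytes.toBytes("row-002"), family, qualifier,
        System.currentTimeMillis(), KeyValue.Type.Put).getKey();

    // compareFlatKey understands the flat-key layout (row compared first),
    // which a raw lexicographic byte comparison does not.
    System.out.println(KeyValue.COMPARATOR.compareFlatKey(k1, k2) < 0); // prints: true
  }
}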