HBASE-13003 - Get tests in TestHFileBlockIndex back (Zhangduo)

Ramkrishna 2015-02-11 10:38:54 +05:30
parent 8a6e9827a2
commit f04850810e
1 changed file with 27 additions and 18 deletions


@@ -42,9 +42,9 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -138,7 +139,7 @@ public class TestHFileBlockIndex {
     fs = HFileSystem.get(conf);
   }
 
-  protected void testBlockIndexInternals(boolean useTags) throws IOException {
+  private void testBlockIndexInternals(boolean useTags) throws IOException {
     path = new Path(TEST_UTIL.getDataTestDir(), "block_index_" + compr + useTags);
     writeWholeIndex(useTags);
     readIndex(useTags);
@ -186,7 +187,7 @@ public class TestHFileBlockIndex {
} }
} }
public void readIndex(boolean useTags) throws IOException { private void readIndex(boolean useTags) throws IOException {
long fileSize = fs.getFileStatus(path).getLen(); long fileSize = fs.getFileStatus(path).getLen();
LOG.info("Size of " + path + ": " + fileSize); LOG.info("Size of " + path + ": " + fileSize);
@@ -216,10 +217,10 @@ public class TestHFileBlockIndex {
     for (byte[] key : keys) {
       assertTrue(key != null);
       assertTrue(indexReader != null);
-      HFileBlock b = indexReader.seekToDataBlock(new KeyValue.KeyOnlyKeyValue(key, 0, key.length),
-          null,
-          true, true, false, null);
-      if (Bytes.BYTES_RAWCOMPARATOR.compare(key, firstKeyInFile) < 0) {
+      HFileBlock b =
+          indexReader.seekToDataBlock(new KeyValue.KeyOnlyKeyValue(key, 0, key.length), null, true,
+            true, false, null);
+      if (KeyValue.COMPARATOR.compareFlatKey(key, firstKeyInFile) < 0) {
         assertTrue(b == null);
         ++i;
         continue;
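
The keys compared here are now full flat keys (row length, row, family length, family, qualifier, timestamp, type) rather than bare row bytes, so the comparison switches from a raw lexicographic byte compare to KeyValue.COMPARATOR.compareFlatKey(), which understands that layout and orders newer timestamps first. A minimal standalone sketch of the difference, assuming the HBase 1.x-era KeyValue/Bytes API; the class name and key contents below are made up for illustration and are not part of the patch:

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;

    public class FlatKeyCompareSketch {
      public static void main(String[] args) {
        // Two flat keys that differ only in timestamp (2 vs. 1).
        byte[] newer = new KeyValue(Bytes.toBytes("row-1"), Bytes.toBytes("f"),
            Bytes.toBytes("q"), 2L, KeyValue.Type.Put).getKey();
        byte[] older = new KeyValue(Bytes.toBytes("row-1"), Bytes.toBytes("f"),
            Bytes.toBytes("q"), 1L, KeyValue.Type.Put).getKey();

        // compareFlatKey() knows the key layout: the newer timestamp sorts first,
        // matching HFile ordering, so this prints a negative number.
        System.out.println(KeyValue.COMPARATOR.compareFlatKey(newer, older));

        // A raw byte compare treats the serialized timestamp as opaque big-endian
        // bytes and orders the older cell first, so this prints a positive number.
        System.out.println(Bytes.BYTES_RAWCOMPARATOR.compare(newer, older));
      }
    }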
@@ -260,21 +261,26 @@ public class TestHFileBlockIndex {
         new HFileBlockIndex.BlockIndexWriter(hbw, null, null);
 
     for (int i = 0; i < NUM_DATA_BLOCKS; ++i) {
-      hbw.startWriting(BlockType.DATA).write(
-          String.valueOf(rand.nextInt(1000)).getBytes());
+      hbw.startWriting(BlockType.DATA).write(String.valueOf(rand.nextInt(1000)).getBytes());
       long blockOffset = outputStream.getPos();
       hbw.writeHeaderAndData(outputStream);
 
       byte[] firstKey = null;
+      byte[] family = Bytes.toBytes("f");
+      byte[] qualifier = Bytes.toBytes("q");
       for (int j = 0; j < 16; ++j) {
-        byte[] k = TestHFileWriterV2.randomOrderedKey(rand, i * 16 + j);
+        byte[] k =
+            new KeyValue(TestHFileWriterV2.randomOrderedKey(rand, i * 16 + j), family, qualifier,
+                EnvironmentEdgeManager.currentTime(), KeyValue.Type.Put).getKey();
         keys.add(k);
-        if (j == 8)
+        if (j == 8) {
           firstKey = k;
+        }
       }
 
       assertTrue(firstKey != null);
-      if (firstKeyInFile == null)
+      if (firstKeyInFile == null) {
         firstKeyInFile = firstKey;
+      }
       biw.addEntry(firstKey, blockOffset, hbw.getOnDiskSizeWithHeader());
       writeInlineBlocks(hbw, outputStream, biw, false);
@@ -439,7 +445,7 @@ public class TestHFileBlockIndex {
     }
   }
 
-  //@Test
+  @Test
   public void testBlockIndexChunk() throws IOException {
     BlockIndexChunk c = new BlockIndexChunk();
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -476,7 +482,7 @@ public class TestHFileBlockIndex {
   }
 
   /** Checks if the HeapSize calculator is within reason */
-  //@Test
+  @Test
   public void testHeapSizeForBlockIndex() throws IOException {
     Class<HFileBlockIndex.BlockIndexReader> cl =
         HFileBlockIndex.BlockIndexReader.class;
@@ -504,7 +510,7 @@ public class TestHFileBlockIndex {
    *
    * @throws IOException
    */
-  //@Test
+  @Test
   public void testHFileWriterAndReader() throws IOException {
     Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
         "hfile_for_block_index");
@@ -536,16 +542,19 @@ public class TestHFileBlockIndex {
         .withFileContext(meta)
         .create();
 
     Random rand = new Random(19231737);
+    byte[] family = Bytes.toBytes("f");
+    byte[] qualifier = Bytes.toBytes("q");
     for (int i = 0; i < NUM_KV; ++i) {
       byte[] row = TestHFileWriterV2.randomOrderedKey(rand, i);
 
       // Key will be interpreted by KeyValue.KEY_COMPARATOR
-      KeyValue kv = KeyValueUtil.createFirstOnRow(row, 0, row.length, row, 0, 0,
-          row, 0, 0);
+      KeyValue kv =
+          new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),
+              TestHFileWriterV2.randomValue(rand));
       byte[] k = kv.getKey();
       writer.append(kv);
       keys[i] = k;
+      values[i] = CellUtil.cloneValue(kv);
       keyStrSet.add(Bytes.toStringBinary(k));
       if (i > 0) {
         assertTrue(KeyValue.COMPARATOR.compareFlatKey(keys[i - 1],