HBASE-1245 hfile meta block handling bugs

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@750784 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2009-03-06 06:47:04 +00:00
parent 1d2810b167
commit 094a2030f0
4 changed files with 22 additions and 4 deletions

CHANGES.txt

@@ -34,6 +34,7 @@ Release 0.20.0 - Unreleased
    HBASE-1239 in the REST interface does not correctly clear the character
               buffer each iteration
    HBASE-1185 wrong request/sec in the gui reporting wrong (Brian Beggs via Stack)
+   HBASE-1245 hfile meta block handling bugs (Ryan Rawson via Stack)
 
   IMPROVEMENTS
    HBASE-1089 Add count of regions on filesystem to master UI; add percentage

HFile.java

@@ -713,7 +713,7 @@ public class HFile {
       // Read in the metadata index.
       if (trailer.metaIndexCount > 0) {
-        this.metaIndex = BlockIndex.readIndex(this.comparator,
+        this.metaIndex = BlockIndex.readIndex(Bytes.BYTES_RAWCOMPARATOR,
           this.istream, this.trailer.metaIndexOffset, trailer.metaIndexCount);
       }
       this.fileInfoLoaded = true;
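
The hunk above swaps the reader's key comparator for Bytes.BYTES_RAWCOMPARATOR when the meta block index is read back, presumably because meta block names (as written by Writer.appendMetaBlock) are plain byte strings rather than key-structured entries, so the index has to be ordered and searched by raw byte comparison. A minimal editor's sketch of that ordering, using only the Bytes utilities referenced in this commit (the class name and main method are illustrative):

    import org.apache.hadoop.hbase.util.Bytes;

    // Editor's sketch, not part of this commit: meta block names sort by raw
    // lexicographic byte order, which is what Bytes.BYTES_RAWCOMPARATOR provides.
    public class MetaNameOrdering {
      public static void main(String[] args) {
        byte [] first = Bytes.toBytes("HFileMeta0");
        byte [] second = Bytes.toBytes("HFileMeta1");
        // "HFileMeta0" sorts before "HFileMeta1" under raw byte comparison.
        System.out.println(Bytes.BYTES_RAWCOMPARATOR.compare(first, second) < 0); // true
      }
    }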
@@ -784,6 +784,9 @@
      * @throws IOException
      */
     public ByteBuffer getMetaBlock(String metaBlockName) throws IOException {
+      if (trailer.metaIndexCount == 0) {
+        return null; // there are no meta blocks
+      }
       if (metaIndex == null) {
         throw new IOException("Meta index not loaded");
       }

Bytes.java

@@ -44,6 +44,7 @@ public class Bytes {
    * Estimate based on study of jhat and jprofiler numbers.
    */
   // JHat says BU is 56 bytes.
+  // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?)
   public static final int ESTIMATED_HEAP_TAX = 16;
 
   /**

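The new comment in Bytes.java records a second measurement of the fixed per-array overhead behind ESTIMATED_HEAP_TAX. Purely as an illustration of what that constant stands for, and not HBase's actual heap-accounting code, a hypothetical estimate could add the tax to the payload and round up to the JVM's 8-byte object alignment:

    // Editor's illustration only; the constant mirrors Bytes.ESTIMATED_HEAP_TAX above.
    public class ByteArrayHeapEstimate {
      public static final int ESTIMATED_HEAP_TAX = 16; // per-byte[] overhead beyond payload

      public static long estimatedSizeOf(final byte [] b) {
        long raw = ESTIMATED_HEAP_TAX + b.length;
        return ((raw + 7) / 8) * 8; // objects are padded to 8-byte boundaries
      }

      public static void main(String[] args) {
        System.out.println(estimatedSizeOf(new byte[10])); // prints 32 under these assumptions
      }
    }
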
TestHFile.java

@@ -169,7 +169,7 @@ public class TestHFile extends TestCase {
   private void writeNumMetablocks(Writer writer, int n) {
     for (int i = 0; i < n; i++) {
-      writer.appendMetaBlock("TfileMeta" + i, ("something to test" + i).getBytes());
+      writer.appendMetaBlock("HFileMeta" + i, ("something to test" + i).getBytes());
     }
   }
@@ -179,7 +179,7 @@ public class TestHFile extends TestCase {
   private void readNumMetablocks(Reader reader, int n) throws IOException {
     for (int i = 0; i < n; i++) {
-      ByteBuffer b = reader.getMetaBlock("TfileMeta" + i);
+      ByteBuffer b = reader.getMetaBlock("HFileMeta" + i);
       byte [] found = Bytes.toBytes(b);
       assertTrue("failed to match metadata", Arrays.equals(
           ("something to test" + i).getBytes(), found));
@@ -191,7 +191,7 @@ public class TestHFile extends TestCase {
   }
 
   private void metablocks(final String compress) throws Exception {
-    Path mFile = new Path(ROOT_DIR, "meta.tfile");
+    Path mFile = new Path(ROOT_DIR, "meta.hfile");
     FSDataOutputStream fout = createFSOutput(mFile);
     Writer writer = new Writer(fout, minBlockSize,
        Compression.getCompressionAlgorithmByName(compress), null, false);
@@ -216,6 +216,19 @@ public class TestHFile extends TestCase {
     metablocks("gz");
   }
 
+  public void testNullMetaBlocks() throws Exception {
+    Path mFile = new Path(ROOT_DIR, "nometa.hfile");
+    FSDataOutputStream fout = createFSOutput(mFile);
+    Writer writer = new Writer(fout, minBlockSize,
+        Compression.Algorithm.NONE, null, false);
+    writer.append("foo".getBytes(), "value".getBytes());
+    writer.close();
+    fout.close();
+    Reader reader = new Reader(fs, mFile, null);
+    reader.loadFileInfo();
+    assertNull(reader.getMetaBlock("non-existant"));
+  }
+
   /**
    * Make sure the orginals for our compression libs doesn't change on us.
    */