HBASE-1615 HBASE-1597 introduced a bug when compacting after a split

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@791693 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2009-07-07 03:44:40 +00:00
parent b0f7d85f3b
commit 619a7243d0
6 changed files with 45 additions and 68 deletions

CHANGES.txt

@@ -242,6 +242,8 @@ Release 0.20.0 - Unreleased
    HBASE-1595 hadoop-default.xml and zoo.cfg in hbase jar
    HBASE-1602 HRegionServer won't go down since we added in new LruBlockCache
    HBASE-1608 TestCachedBlockQueue failing on some jvms (Jon Gray via Stack)
+   HBASE-1615 HBASE-1597 introduced a bug when compacting after a split
+              (Jon Gray via Stack)
 IMPROVEMENTS
    HBASE-1089 Add count of regions on filesystem to master UI; add percentage

HalfHFileReader.java

@@ -80,7 +80,12 @@ public class HalfHFileReader extends HFile.Reader {
   @Override
   public HFileScanner getScanner() {
-    final HFileScanner s = super.getScanner();
+    return this.getScanner(true);
+  }
+
+  @Override
+  public HFileScanner getScanner(boolean cacheBlocks) {
+    final HFileScanner s = super.getScanner(cacheBlocks);
     return new HFileScanner() {
       final HFileScanner delegate = s;
       public boolean atEnd = false;
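
The idiom here is worth spelling out: the no-arg getScanner() now funnels into the boolean overload, so the half-file wrapping happens on every path and the cacheBlocks flag is never silently dropped. A minimal sketch of the pattern with reduced stand-in types (MiniScanner and friends are illustrative, not the real HBase interfaces):

    import java.io.IOException;

    // Reduced stand-in for HFileScanner; the real interface is much larger.
    interface MiniScanner {
      boolean seekTo() throws IOException;
    }

    // Stand-in for HFile.Reader: the boolean overload does the real work,
    // the no-arg form merely supplies the caching default.
    class MiniReader {
      public MiniScanner getScanner() {
        return getScanner(true);
      }
      public MiniScanner getScanner(final boolean cacheBlocks) {
        return new MiniScanner() {
          public boolean seekTo() {
            return cacheBlocks; // a real scanner would read blocks here
          }
        };
      }
    }

    // Stand-in for HalfHFileReader: override both forms, wrap the delegate
    // returned by super, and thread the flag through unchanged.
    class MiniHalfReader extends MiniReader {
      @Override
      public MiniScanner getScanner() {
        return this.getScanner(true);
      }
      @Override
      public MiniScanner getScanner(final boolean cacheBlocks) {
        final MiniScanner delegate = super.getScanner(cacheBlocks);
        return new MiniScanner() {
          public boolean seekTo() throws IOException {
            return delegate.seekTo(); // real code clamps reads to one half
          }
        };
      }
    }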

HFile.java

@@ -671,7 +671,7 @@ public class HFile {
    */
   @SuppressWarnings("unused")
   private Reader() throws IOException {
-    this(null, null, null, false);
+    this(null, -1, null, false);
   }

   /**
@@ -706,7 +706,7 @@ public class HFile {
     this.fileSize = size;
     this.istream = fsdis;
     this.closeIStream = false;
-    this.name = this.istream.toString();
+    this.name = this.istream == null? "": this.istream.toString();
     this.inMemory = inMemory;
   }
@@ -817,8 +817,20 @@
    * @return Scanner on this file.
    */
   public HFileScanner getScanner() {
-    return new Scanner(this);
+    return new Scanner(this, true);
   }
+
+  /**
+   * Create a Scanner on this file. No seeks or reads are done on creation.
+   * Call {@link HFileScanner#seekTo(byte[])} to position and start the read.
+   * There is nothing to clean up in a Scanner. Letting go of your references
+   * to the scanner is sufficient.
+   * @return Scanner on this file.
+   */
+  public HFileScanner getScanner(boolean cacheBlocks) {
+    return new Scanner(this, cacheBlocks);
+  }

   /**
    * @param key Key to search.
    * @return Block number of the block containing the key or -1 if not in this
@@ -873,7 +885,7 @@
    * @return Block wrapped in a ByteBuffer.
    * @throws IOException
    */
-  ByteBuffer readBlock(int block) throws IOException {
+  ByteBuffer readBlock(int block, boolean cacheBlock) throws IOException {
     if (blockIndex == null) {
       throw new IOException("Block index not loaded");
     }
@@ -926,25 +938,13 @@
       buf.rewind();

       // Cache the block
-      cacheBlock(name + block, buf.duplicate());
+      if(cacheBlock && cache != null) {
+        cache.cacheBlock(name + block, buf.duplicate(), inMemory);
+      }

       return buf;
     }
   }

-  /**
-   * Cache this block if there is a block cache available.<p>
-   *
-   * Makes a copy of the ByteBuffer, not the one we are sending back, so the
-   * position does not get messed up.
-   * @param blockName
-   * @param buf
-   */
-  void cacheBlock(String blockName, ByteBuffer buf) {
-    if (cache != null) {
-      cache.cacheBlock(blockName, buf.duplicate(), inMemory);
-    }
-  }

   /*
    * Decompress <code>compressedSize</code> bytes off the backing
@@ -1041,6 +1041,8 @@
     private final Reader reader;
     private ByteBuffer block;
     private int currBlock;
+    private boolean cacheBlocks = false;

     private int currKeyLen = 0;
     private int currValueLen = 0;
@@ -1051,6 +1053,11 @@
       this.reader = r;
     }

+    public Scanner(Reader r, boolean cacheBlocks) {
+      this.reader = r;
+      this.cacheBlocks = cacheBlocks;
+    }
+
     public KeyValue getKeyValue() {
       if(this.block == null) {
         return null;
@@ -1099,7 +1106,7 @@
           block = null;
           return false;
         }
-        block = reader.readBlock(currBlock);
+        block = reader.readBlock(currBlock, cacheBlocks);
         currKeyLen = block.getInt();
         currValueLen = block.getInt();
         blockFetches++;
@@ -1230,7 +1237,7 @@
         currValueLen = block.getInt();
       }
       currBlock = 0;
-      block = reader.readBlock(currBlock);
+      block = reader.readBlock(currBlock, cacheBlocks);
       currKeyLen = block.getInt();
       currValueLen = block.getInt();
       blockFetches++;
@@ -1239,12 +1246,12 @@
     private void loadBlock(int bloc) throws IOException {
       if (block == null) {
-        block = reader.readBlock(bloc);
+        block = reader.readBlock(bloc, cacheBlocks);
         currBlock = bloc;
         blockFetches++;
       } else {
         if (bloc != currBlock) {
-          block = reader.readBlock(bloc);
+          block = reader.readBlock(bloc, cacheBlocks);
           currBlock = bloc;
           blockFetches++;
         } else {
@@ -1260,35 +1267,6 @@
       }
     }

-  /**
-   * HFile Reader that does not cache blocks that were not already cached.<p>
-   *
-   * Used for compactions.
-   */
-  public static class CompactionReader extends Reader {
-    public CompactionReader(Reader reader) {
-      super(reader.istream, reader.fileSize, reader.cache, reader.inMemory);
-      super.blockIndex = reader.blockIndex;
-      super.trailer = reader.trailer;
-      super.lastkey = reader.lastkey;
-      super.avgKeyLen = reader.avgKeyLen;
-      super.avgValueLen = reader.avgValueLen;
-      super.comparator = reader.comparator;
-      super.metaIndex = reader.metaIndex;
-      super.fileInfoLoaded = reader.fileInfoLoaded;
-      super.compressAlgo = reader.compressAlgo;
-    }
-
-    /**
-     * Do not cache this block when doing a compaction.
-     */
-    @Override
-    void cacheBlock(String blockName, ByteBuffer buf) {
-      return;
-    }
-  }

   /*
    * The RFile has a fixed trailer which contains offsets to other variable
    * parts of the file. Also includes basic metadata on this file.
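
Net effect in HFile: the CompactionReader subclass is gone, replaced by a cacheBlocks flag that flows from getScanner(boolean) through readBlock(int, boolean) to the block-cache check. A hedged usage sketch against the post-patch API (the helper class and method names are illustrative, not from the patch):

    import java.io.IOException;

    import org.apache.hadoop.hbase.io.hfile.HFile;
    import org.apache.hadoop.hbase.io.hfile.HFileScanner;

    public class ScannerChoiceSketch {
      // Normal reads keep the old behavior: blocks read go into the cache.
      static HFileScanner scannerForReads(HFile.Reader r) {
        return r.getScanner(); // equivalent to getScanner(true) after this patch
      }

      // Compactions pass false so a one-shot sweep over old store files
      // does not evict the hot working set from the LRU block cache.
      static HFileScanner scannerForCompaction(HFile.Reader r) throws IOException {
        HFileScanner s = r.getScanner(false);
        s.seekTo(); // position at the first key before iterating
        return s;
      }
    }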

Store.java

@@ -54,7 +54,6 @@ import org.apache.hadoop.hbase.io.SequenceFile;
 import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
-import org.apache.hadoop.hbase.io.hfile.HFile.CompactionReader;
 import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
@@ -681,12 +680,12 @@ public class Store implements HConstants, HeapSize {
         LOG.warn("Path is null for " + file);
         return null;
       }
-      CompactionReader r = file.getCompactionReader();
+      Reader r = file.getReader();
       if (r == null) {
         LOG.warn("StoreFile " + file + " has a null Reader");
         continue;
       }
-      long len = file.getCompactionReader().length();
+      long len = file.getReader().length();
       fileSizes[i] = len;
       totalSize += len;
     }
@@ -845,12 +844,13 @@
     // init:
     for (int i = 0; i < filesToCompact.size(); ++i) {
       // TODO open a new HFile.Reader w/o block cache.
-      CompactionReader r = filesToCompact.get(i).getCompactionReader();
+      Reader r = filesToCompact.get(i).getReader();
       if (r == null) {
         LOG.warn("StoreFile " + filesToCompact.get(i) + " has a null Reader");
         continue;
       }
-      scanners[i] = new StoreFileScanner(r.getScanner());
+      // Instantiate HFile.Reader.Scanner to not cache blocks
+      scanners[i] = new StoreFileScanner(r.getScanner(false));
     }

     if (majorCompaction) {
if (majorCompaction) {
@@ -960,7 +960,7 @@
       // 4. Compute new store size
       this.storeSize = 0L;
       for (StoreFile hsf : this.storefiles.values()) {
-        Reader r = hsf.getCompactionReader();
+        Reader r = hsf.getReader();
         if (r == null) {
           LOG.warn("StoreFile " + hsf + " has a null Reader");
           continue;
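
The Store changes also show why the flag is the safer design. The removed CompactionReader was constructed by copying another reader's fields, so the result was always a plain Reader; when the wrapped reader was a HalfHFileReader (a daughter region reading half of its parent's store file after a split), the half-file overrides were lost, which is presumably the compact-after-split bug named in the summary. Dispatching through the virtual getScanner(boolean) keeps the subclass in charge. A contrived stand-in (not HBase code) showing the difference:

    // Contrived stand-ins, not HBase classes.
    class WholeFileReader {
      String scan(boolean cacheBlocks) {
        return "whole file, cache=" + cacheBlocks;
      }
    }

    class HalfFileReader extends WholeFileReader {
      @Override
      String scan(boolean cacheBlocks) {
        return "half file, cache=" + cacheBlocks;
      }
    }

    public class WrapperVsFlag {
      public static void main(String[] args) {
        WholeFileReader r = new HalfFileReader();
        // Old style: rewrap in a base-typed copy of r's fields.
        // The copy is a plain WholeFileReader, so half-file behavior is gone.
        WholeFileReader wrapped = new WholeFileReader();
        System.out.println(wrapped.scan(false)); // "whole file ..." -- the bug
        // New style: pass the flag through a virtual call on r itself.
        System.out.println(r.scan(false));       // "half file ..." -- the fix
      }
    }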

StoreFile.java

@@ -309,14 +309,6 @@ public class StoreFile implements HConstants {
     return this.reader;
   }

-  /**
-   * Gets a special Reader for use during compactions. Will not cache blocks.
-   * @return Current reader. Must call open first else returns null.
-   */
-  public HFile.CompactionReader getCompactionReader() {
-    return new HFile.CompactionReader(this.reader);
-  }

   /**
    * @throws IOException
    */

TestCompaction.java

@@ -169,7 +169,7 @@ public class TestCompaction extends HBaseTestCase {
     boolean containsStartRow = false;
     for (StoreFile f: this.r.stores.get(COLUMN_FAMILY_TEXT).getStorefiles().
         values()) {
-      HFileScanner scanner = f.getCompactionReader().getScanner();
+      HFileScanner scanner = f.getReader().getScanner(false);
       scanner.seekTo();
       do {
         byte [] row = scanner.getKeyValue().getRow();