HBASE-4582 Store.java cleanup (failing TestHeapSize and has warnings) (jgray)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1182530 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Jonathan Gray 2011-10-12 19:08:50 +00:00
parent a548c2084c
commit c2681355b5
3 changed files with 6 additions and 14 deletions

View File

@ -352,6 +352,7 @@ Release 0.92.0 - Unreleased
and regionservers (Lars H)
HBASE-4555 TestShell seems passed, but actually errors seen in test output
file (Mingjie Lai)
HBASE-4582 Store.java cleanup (failing TestHeapSize and has warnings)
TESTS
HBASE-4450 test for number of blocks read: to serve as baseline for expected

View File

@ -59,7 +59,6 @@ import org.apache.hadoop.util.StringUtils;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
/**
@ -133,7 +132,6 @@ public class Store implements HeapSize {
new CopyOnWriteArraySet<ChangedReadersObserver>();
private final int blocksize;
private final boolean blockcache;
/** Compression algorithm for flush files and minor compaction */
private final Compression.Algorithm compression;
/** Compression algorithm for major compaction */
@ -166,7 +164,6 @@ public class Store implements HeapSize {
this.region = region;
this.family = family;
this.conf = conf;
this.blockcache = family.isBlockCacheEnabled();
this.blocksize = family.getBlocksize();
this.compression = family.getCompression();
// avoid overriding compression setting for major compactions if the user
@ -1348,7 +1345,7 @@ public class Store implements HeapSize {
this.memstore.getRowKeyAtOrBefore(state);
// Check if match, if we got a candidate on the asked for 'kv' row.
// Process each store file. Run through from newest to oldest.
for (StoreFile sf : Iterables.reverse(storefiles)) { for (StoreFile sf : Lists.reverse(storefiles)) {
// Update the candidate keys from the current map file
rowAtOrBeforeFromStoreFile(sf, state);
}
@ -1387,7 +1384,7 @@ public class Store implements HeapSize {
firstOnRow = new KeyValue(lastKV.getRow(), HConstants.LATEST_TIMESTAMP);
}
// Get a scanner that caches blocks and that uses pread.
HFileScanner scanner = r.getScanner(true, true); HFileScanner scanner = r.getHFileReader().getScanner(true, true);
// Seek scanner. If can't seek it, return.
if (!seekToScanner(scanner, firstOnRow, firstKV)) return;
// If we found candidate on firstOnRow, just return. THIS WILL NEVER HAPPEN!
@ -1793,7 +1790,7 @@ public class Store implements HeapSize {
public static final long FIXED_OVERHEAD = ClassSize.align(
ClassSize.OBJECT + (17 * ClassSize.REFERENCE) +
(7 * Bytes.SIZEOF_LONG) + (1 * Bytes.SIZEOF_DOUBLE) +
(6 * Bytes.SIZEOF_INT) + (2 * Bytes.SIZEOF_BOOLEAN)); (7 * Bytes.SIZEOF_INT) + (1 * Bytes.SIZEOF_BOOLEAN));
public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD +
ClassSize.OBJECT + ClassSize.REENTRANT_LOCK +

View File

@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.HalfStoreFileReader;
import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.io.hfile.HFile;
@ -121,9 +120,6 @@ public class StoreFile {
// Need to make it 8k for testing.
public static final int DEFAULT_BLOCKSIZE_SMALL = 8 * 1024;
private static BlockCache hfileBlockCache = null;
private final FileSystem fs;
// This file's path.
@ -173,7 +169,6 @@ public class StoreFile {
// Used making file ids.
private final static Random rand = new Random();
private final Configuration conf;
/**
* Bloom filter type specified in column family configuration. Does not
@ -206,7 +201,6 @@ public class StoreFile {
final CacheConfig cacheConf,
final BloomType cfBloomType)
throws IOException {
this.conf = conf;
this.fs = fs;
this.path = p;
this.cacheConf = cacheConf;