HBASE-1719 hold a reference to the region in stores instead of only the region info
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@799074 13f79535-47bb-0310-9956-ffa450edef68
commit 21d54c0c8c
parent ea183fe272
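Why the indirection matters: before this patch a Store held only an HRegionInfo, so per-region runtime state such as the force-split flag had to be parked on HRegionInfo; with an HRegion reference the flag (and any future region-level state) stays on the region, and the store still reaches the region info through it. A minimal, self-contained Java sketch of that ownership follows; the class and method names below are simplified stand-ins invented for illustration, not the actual HBase types.

// Illustrative sketch only (assumed, simplified stand-ins -- not the real
// org.apache.hadoop.hbase classes). It mirrors the shape of the change: the
// store holds its owning region and reaches region metadata through it, and
// the force-split flag lives on the region rather than on the region info.
class RegionInfoSketch {
  private final String regionName;
  RegionInfoSketch(String regionName) { this.regionName = regionName; }
  String getRegionNameAsString() { return regionName; }
}

class RegionSketch {
  final RegionInfoSketch regionInfo;
  private boolean splitRequest;                 // moved off the region info by this commit
  RegionSketch(RegionInfoSketch info) { this.regionInfo = info; }
  // Sets the force-split flag and returns the previous value, like the patched
  // HRegion.shouldSplit(boolean).
  boolean shouldSplit(boolean b) {
    boolean old = this.splitRequest;
    this.splitRequest = b;
    return old;
  }
}

class StoreSketch {
  private final RegionSketch region;            // was: a bare region-info reference
  StoreSketch(RegionSketch region) { this.region = region; }
  RegionInfoSketch getRegionInfo() { return this.region.regionInfo; }
  // Compaction consumes the flag: read it and clear it in one step.
  boolean consumeForceSplit() { return this.region.shouldSplit(false); }
}

public class Hbase1719Sketch {
  public static void main(String[] args) {
    RegionSketch region = new RegionSketch(new RegionInfoSketch("testtable,,1"));
    StoreSketch store = new StoreSketch(region);
    region.shouldSplit(true);                          // region server requests a split
    System.out.println(store.consumeForceSplit());     // true: the store sees it via its region
    System.out.println(store.getRegionInfo().getRegionNameAsString());
  }
}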
@@ -533,6 +533,8 @@ Release 0.20.0 - Unreleased
    HBASE-1683 OOME on master splitting logs; stuck, won't go down
    HBASE-1704 Better zk error when failed connect
    HBASE-1714 Thrift server: prefix scan API
+   HBASE-1719 hold a reference to the region in stores instead of only the
+              region info
 
   OPTIMIZATIONS
    HBASE-1412 Change values for delete column and column family in KeyValue
@@ -69,7 +69,6 @@ public class HRegionInfo extends VersionedWritable implements WritableComparable
   //TODO: Move NO_HASH to HStoreFile which is really the only place it is used.
   public static final int NO_HASH = -1;
   private volatile int encodedName = NO_HASH;
-  private boolean splitRequest = false;
 
   private void setHashCode() {
     int result = Arrays.hashCode(this.regionName);
@@ -456,17 +455,6 @@ public class HRegionInfo extends VersionedWritable implements WritableComparable
     return Bytes.compareTo(this.endKey, o.endKey);
   }
 
-  /**
-   * For internal use in forcing splits ahead of file size limit.
-   * @param b
-   * @return previous value
-   */
-  public boolean shouldSplit(boolean b) {
-    boolean old = this.splitRequest;
-    this.splitRequest = b;
-    return old;
-  }
-
   /**
    * @return Comparator to use comparing {@link KeyValue}s.
    */
@@ -199,6 +199,7 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
     new ReentrantReadWriteLock();
   private final Object splitLock = new Object();
   private long minSequenceId;
+  private boolean splitRequest;
 
   /**
    * Name of the region info file that resides just under the region directory.
@@ -1507,7 +1508,7 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
   protected Store instantiateHStore(Path baseDir,
     HColumnDescriptor c, Path oldLogFile, Progressable reporter)
   throws IOException {
-    return new Store(baseDir, this.regionInfo, c, this.fs, oldLogFile,
+    return new Store(baseDir, this, c, this.fs, oldLogFile,
       this.conf, reporter);
   }
 
@@ -2449,6 +2450,17 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
     }
   }
 
+  /**
+   * For internal use in forcing splits ahead of file size limit.
+   * @param b
+   * @return previous value
+   */
+  public boolean shouldSplit(boolean b) {
+    boolean old = this.splitRequest;
+    this.splitRequest = b;
+    return old;
+  }
+
   /**
    * Facility for dumping and compacting catalog tables.
    * Only does catalog tables since these are only tables we for sure know
@@ -1482,7 +1482,7 @@ public class HRegionServer implements HConstants, HRegionInterface,
       case MSG_REGION_SPLIT:
         region = getRegion(info.getRegionName());
         region.flushcache();
-        region.regionInfo.shouldSplit(true);
+        region.shouldSplit(true);
         // force a compaction; split will be side-effect.
         compactSplitThread.compactionRequested(region,
           e.msg.getType().name());
@@ -95,7 +95,7 @@ public class Store implements HConstants, HeapSize {
   protected final MemStore memstore;
   // This stores directory in the filesystem.
   private final Path homedir;
-  private final HRegionInfo regioninfo;
+  private final HRegion region;
   private final HColumnDescriptor family;
   final FileSystem fs;
   private final HBaseConfiguration conf;
@@ -154,13 +154,14 @@ public class Store implements HConstants, HeapSize {
   * failed. Can be null.
   * @throws IOException
   */
-  protected Store(Path basedir, HRegionInfo info, HColumnDescriptor family,
+  protected Store(Path basedir, HRegion region, HColumnDescriptor family,
     FileSystem fs, Path reconstructionLog, HBaseConfiguration conf,
     final Progressable reporter)
   throws IOException {
+    HRegionInfo info = region.regionInfo;
     this.homedir = getStoreHomedir(basedir, info.getEncodedName(),
       family.getName());
-    this.regioninfo = info;
+    this.region = region;
     this.family = family;
     this.fs = fs;
     this.conf = conf;
@@ -317,7 +318,7 @@ public class Store implements HConstants, HeapSize {
         // METACOLUMN info such as HBASE::CACHEFLUSH entries
         if (/* commented out for now - stack via jgray key.isTransactionEntry() || */
             val.matchingFamily(HLog.METAFAMILY) ||
-            !Bytes.equals(key.getRegionName(), regioninfo.getRegionName()) ||
+            !Bytes.equals(key.getRegionName(), region.regionInfo.getRegionName()) ||
             !val.matchingFamily(family.getName())) {
           continue;
         }
@@ -536,7 +537,7 @@ public class Store implements HConstants, HeapSize {
         ", sequenceid=" + logCacheFlushId +
         ", memsize=" + StringUtils.humanReadableInt(flushed) +
         ", filesize=" + StringUtils.humanReadableInt(r.length()) +
-        " to " + this.regioninfo.getRegionNameAsString());
+        " to " + this.region.regionInfo.getRegionNameAsString());
     }
     return sf;
   }
@@ -637,7 +638,7 @@ public class Store implements HConstants, HeapSize {
   * @throws IOException
   */
  StoreSize compact(final boolean mc) throws IOException {
-    boolean forceSplit = this.regioninfo.shouldSplit(false);
+    boolean forceSplit = this.region.shouldSplit(false);
    boolean majorcompaction = mc;
    synchronized (compactLock) {
      // filesToCompact are sorted oldest to newest.
@@ -868,13 +869,17 @@ public class Store implements HConstants, HeapSize {
           more = scanner.next(kvs);
           // output to writer:
           for (KeyValue kv : kvs) {
-            if (writer == null) writer = getWriter(this.regionCompactionDir);
+            if (writer == null) {
+              writer = getWriter(this.regionCompactionDir);
+            }
             writer.append(kv);
           }
           kvs.clear();
         }
       } finally {
-        if (scanner != null) scanner.close();
+        if (scanner != null) {
+          scanner.close();
+        }
       }
     } else {
       MinorCompactingStoreScanner scanner = null;
@@ -1473,8 +1478,12 @@ public class Store implements HConstants, HeapSize {
     }
   }
 
+  HRegion getHRegion() {
+    return this.region;
+  }
+
   HRegionInfo getHRegionInfo() {
-    return this.regioninfo;
+    return this.region.regionInfo;
   }
 
   /**
@@ -70,17 +70,22 @@ public class TestStore extends TestCase {
   private void init(String methodName) throws IOException {
     //Setting up a Store
     Path basedir = new Path(DIR+methodName);
+    Path logdir = new Path(DIR+methodName+"/logs");
     HColumnDescriptor hcd = new HColumnDescriptor(family);
     HBaseConfiguration conf = new HBaseConfiguration();
     FileSystem fs = FileSystem.get(conf);
     Path reconstructionLog = null;
     Progressable reporter = null;
 
+    fs.delete(logdir, true);
 
     HTableDescriptor htd = new HTableDescriptor(table);
     htd.addFamily(hcd);
     HRegionInfo info = new HRegionInfo(htd, null, null, false);
-    store = new Store(basedir, info, hcd, fs, reconstructionLog, conf,
+    HLog hlog = new HLog(fs, logdir, conf, null);
+    HRegion region = new HRegion(basedir, hlog, fs, conf, info, null);
+
+    store = new Store(basedir, region, hcd, fs, reconstructionLog, conf,
       reporter);
   }
 
@@ -112,7 +117,7 @@ public class TestStore extends TestCase {
     this.store.close();
     // Reopen it... should pick up two files
     this.store = new Store(storedir.getParent().getParent(),
-      this.store.getHRegionInfo(),
+      this.store.getHRegion(),
       this.store.getFamily(), fs, null, c, null);
     System.out.println(this.store.getHRegionInfo().getEncodedName());
     assertEquals(2, this.store.getStorefilesCount());