HBASE-990 NoSuchElementException in flushSomeRegions; took two attempts
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@718636 13f79535-47bb-0310-9956-ffa450edef68
parent 83cef9ee78
commit 7525d8ff53
CHANGES.txt
@@ -69,7 +69,7 @@ Release 0.19.0 - Unreleased
    HBASE-951 Either shut down master or let it finish cleanup
    HBASE-964, HBASE-678 provide for safe-mode without locking up HBase "waiting
              for root region"
-   HBASE-990 NoSuchElementException in flushSomeRegions
+   HBASE-990 NoSuchElementException in flushSomeRegions; took two attempts.
    HBASE-602 HBase Crash when network card has a IPv6 address
    HBASE-996 Migration script to up the versions in catalog tables
    HBASE-991 Update the mapred package document examples so they work with
MemcacheFlusher.java
@@ -219,15 +219,17 @@ class MemcacheFlusher extends Thread implements FlushRequester {
    * to this regionserver are blocked.
    */
   private synchronized void flushSomeRegions() {
-    SortedMap<Long, HRegion> m =
-      this.server.getCopyOfOnlineRegionsSortedBySize();
-    if (m.size() <= 0) {
-      LOG.info("No online regions to flush though we've been asked flush some.");
-      return;
-    }
     // keep flushing until we hit the low water mark
-    while (server.getGlobalMemcacheSize() >= globalMemcacheLimitLowMark) {
+    for (SortedMap<Long, HRegion> m =
+        this.server.getCopyOfOnlineRegionsSortedBySize();
+      server.getGlobalMemcacheSize() >= globalMemcacheLimitLowMark;) {
       // flush the region with the biggest memcache
+      if (m.size() <= 0) {
+        LOG.info("No online regions to flush though we've been asked flush " +
+          "some; globalMemcacheSize=" + this.server.getGlobalMemcacheSize() +
+          ", globalMemcacheLimitLowMark=" + this.globalMemcacheLimitLowMark);
+        break;
+      }
       HRegion biggestMemcacheRegion = m.remove(m.firstKey());
       if (!flushRegion(biggestMemcacheRegion, true)) {
         // Something bad happened - give up.
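For context on the fix: the exception came from calling firstKey() on the region snapshot after the loop had already drained it. The old code checked the snapshot for emptiness only once, before entering the while loop, so if flushing every region in the snapshot still left the global memcache at or above the low-water mark, the next m.firstKey() call threw NoSuchElementException. The patch re-checks the snapshot on every iteration and breaks out instead of returning early. The sketch below reproduces both patterns in isolation; FlushLoopDemo, regionsBySize, globalSize, and lowWaterMark are illustrative stand-ins, not names from the HBase source above.

import java.util.NoSuchElementException;
import java.util.SortedMap;
import java.util.TreeMap;

// Hypothetical standalone demo of the flush-loop bug and its fix; none of these
// names come from the HBase patch itself.
public class FlushLoopDemo {

  public static void main(String[] args) {
    // A one-time snapshot of "regions" keyed by memcache size, as the old code took up front.
    SortedMap<Long, String> regionsBySize = new TreeMap<Long, String>();
    regionsBySize.put(100L, "regionA");
    regionsBySize.put(200L, "regionB");

    long globalSize = 1000L;        // pretend global memcache usage
    final long lowWaterMark = 500L; // pretend low-water mark

    // Old pattern: the emptiness check happens only once, before the loop.
    try {
      while (globalSize >= lowWaterMark) {
        // Once the snapshot is drained, firstKey() throws NoSuchElementException
        // even though the global size is still above the low-water mark.
        String flushed = regionsBySize.remove(regionsBySize.firstKey());
        globalSize -= 100L; // each flush frees less than needed, so the loop keeps going
        System.out.println("flushed " + flushed + ", globalSize=" + globalSize);
      }
    } catch (NoSuchElementException e) {
      System.out.println("old loop failed: " + e);
    }

    // Fixed pattern (mirroring the committed for-loop): re-check the snapshot
    // inside the loop and break instead of calling firstKey() on an empty map.
    regionsBySize.put(100L, "regionA");
    regionsBySize.put(200L, "regionB");
    globalSize = 1000L;
    for (SortedMap<Long, String> m = regionsBySize; globalSize >= lowWaterMark;) {
      if (m.size() <= 0) {
        System.out.println("nothing left to flush; globalSize=" + globalSize);
        break;
      }
      String flushed = m.remove(m.firstKey());
      globalSize -= 100L;
      System.out.println("flushed " + flushed + ", globalSize=" + globalSize);
    }
  }
}

Running the demo prints the NoSuchElementException from the old-style loop and a clean "nothing left to flush" exit from the fixed loop.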