HBASE-20732 Shutdown scan pool when master is stopped
parent 03946a389a
commit c81d0aff95
hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -1286,6 +1286,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
     }
     super.stopServiceThreads();
     stopChores();
+    CleanerChore.shutDownChorePool();
 
     // Wait for all the remaining region servers to report in IFF we were
     // running a cluster shutdown AND we were NOT aborting.
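The only functional change here is the added CleanerChore.shutDownChorePool() call: once the master's chores are stopped, the shared directory-scan pool is torn down as well, so queued scan tasks do not keep running after the master stops. Below is a minimal, self-contained sketch of that pattern; it is not HBase code, and the names (ScanPoolHolder, init) are hypothetical. Only the guarded static shutdown mirrors what the patch adds.

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;

public final class ScanPoolHolder {
  private static ForkJoinPool pool;

  static synchronized void init(int parallelism) {
    if (pool == null) {
      pool = new ForkJoinPool(parallelism);
    }
  }

  static synchronized void shutDownNow() {
    // Idempotent: safe to call from a stop/abort path even if init() never ran.
    if (pool == null || pool.isShutdown()) {
      return;
    }
    pool.shutdownNow(); // drops queued tasks and attempts to stop running workers
    pool = null;
  }

  public static void main(String[] args) throws Exception {
    init(2);
    pool.submit(new Runnable() {
      @Override
      public void run() {
        while (!Thread.currentThread().isInterrupted()) {
          // stands in for a long recursive scan of an archive directory
        }
      }
    });
    TimeUnit.MILLISECONDS.sleep(100);
    shutDownNow(); // without an explicit shutdown, the scan would outlive the "stopped" owner
  }
}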
hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.master.cleaner;
 
 import java.io.IOException;
+import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -118,7 +119,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends ScheduledChore
           break;
         }
       }
-      pool.shutdownNow();
+      shutDownNow();
       LOG.info("Update chore's pool size from " + pool.getParallelism() + " to " + size);
       pool = new ForkJoinPool(size);
     }
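Context for the one-line change above: a ForkJoinPool's parallelism is fixed at construction, so the size-update path shuts the old pool down and builds a new one, and that shutdown now goes through the guarded shutDownNow() added in the next hunk. A rough standalone illustration of the resize-by-replace idea, with hypothetical names and assuming nothing beyond java.util.concurrent:

import java.util.concurrent.ForkJoinPool;

final class ResizableScanPool {
  private ForkJoinPool pool = new ForkJoinPool(2);

  synchronized void updateSize(int size) {
    if (size == pool.getParallelism()) {
      return; // nothing to change
    }
    if (!pool.isShutdown()) {
      pool.shutdownNow(); // stop the old pool before replacing it
    }
    System.out.println("Update pool size from " + pool.getParallelism() + " to " + size);
    pool = new ForkJoinPool(size); // parallelism can only be set via the constructor
  }
}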
@@ -136,6 +137,13 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends ScheduledChore
     synchronized void submit(ForkJoinTask task) {
       pool.submit(task);
     }
+
+    synchronized void shutDownNow() {
+      if (pool == null || pool.isShutdown()) {
+        return;
+      }
+      pool.shutdownNow();
+    }
   }
   // It may be waste resources for each cleaner chore own its pool,
   // so let's make pool for all cleaner chores.
@@ -154,12 +162,18 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends ScheduledChore
     }
   }
 
+  public static void shutDownChorePool() {
+    if (POOL != null) {
+      POOL.shutDownNow();
+      POOL = null;
+    }
+  }
 
   public CleanerChore(String name, final int sleepPeriod, final Stoppable s, Configuration conf,
       FileSystem fs, Path oldFileDir, String confKey) {
     this(name, sleepPeriod, s, conf, fs, oldFileDir, confKey, null);
   }
 
   /**
    * @param name name of the chore being run
    * @param sleepPeriod the period of time to sleep between each run
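Taken together, the patch gives the shared chore pool a symmetric static lifecycle: initChorePool(Configuration), the existing initializer also called by the test changes further down, sets up POOL, and the new shutDownChorePool() tears it down and nulls the reference so a later init can recreate it. A hypothetical call-site sketch, not a verbatim HBase call site; it is placed in the same package so method visibility is not an issue:

package org.apache.hadoop.hbase.master.cleaner;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class ChorePoolLifecycleSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    CleanerChore.initChorePool(conf); // once, when the active master starts its cleaner chores
    try {
      // ... cleaner chores submit their directory-scan tasks to the shared pool ...
    } finally {
      CleanerChore.shutDownChorePool(); // once, from HMaster#stopServiceThreads (first hunk)
    }
  }
}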
@@ -432,6 +446,7 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends ScheduledChore
     protected Boolean compute() {
       LOG.trace("Cleaning under " + dir);
       List<FileStatus> subDirs;
+      List<FileStatus> tmpFiles;
       final List<FileStatus> files;
       try {
         // if dir doesn't exist, we'll get null back for both of these
@@ -442,48 +457,48 @@ public abstract class CleanerChore<T extends FileCleanerDelegate> extends ScheduledChore
             return f.isDirectory();
           }
         });
-        files = FSUtils.listStatusWithStatusFilter(fs, dir, new FileStatusFilter() {
+        if (subDirs == null) {
+          subDirs = Collections.emptyList();
+        }
+        tmpFiles = FSUtils.listStatusWithStatusFilter(fs, dir, new FileStatusFilter() {
           @Override
           public boolean accept(FileStatus f) {
             return f.isFile();
           }
         });
+        files = tmpFiles == null ? Collections.<FileStatus>emptyList() : tmpFiles;
       } catch (IOException ioe) {
         LOG.warn("failed to get FileStatus for contents of '" + dir + "'", ioe);
         return false;
       }
 
-      boolean nullSubDirs = subDirs == null;
-      if (nullSubDirs) {
-        LOG.trace("There is no subdir under " + dir);
-      }
-      if (files == null) {
-        LOG.trace("There is no file under " + dir);
+      boolean allFilesDeleted = true;
+      if (!files.isEmpty()) {
+        allFilesDeleted = deleteAction(new Action<Boolean>() {
+          @Override
+          public Boolean act() throws IOException {
+            return checkAndDeleteFiles(files);
+          }
+        }, "files");
       }
 
-      int capacity = nullSubDirs ? 0 : subDirs.size();
-      final List<CleanerTask> tasks = Lists.newArrayListWithCapacity(capacity);
-      if (!nullSubDirs) {
+      boolean allSubdirsDeleted = true;
+      if (!subDirs.isEmpty()) {
+        final List<CleanerTask> tasks = Lists.newArrayListWithCapacity(subDirs.size());
        for (FileStatus subdir : subDirs) {
          CleanerTask task = new CleanerTask(subdir, false);
          tasks.add(task);
          task.fork();
        }
+        allSubdirsDeleted = deleteAction(new Action<Boolean>() {
+          @Override
+          public Boolean act() throws IOException {
+            return getCleanResult(tasks);
+          }
+        }, "subdirs");
       }
 
-      boolean result = true;
-      result &= deleteAction(new Action<Boolean>() {
-        @Override
-        public Boolean act() throws IOException {
-          return checkAndDeleteFiles(files);
-        }
-      }, "files");
-      result &= deleteAction(new Action<Boolean>() {
-        @Override
-        public Boolean act() throws IOException {
-          return getCleanResult(tasks);
-        }
-      }, "subdirs");
+      boolean result = allFilesDeleted && allSubdirsDeleted;
       // if and only if files and subdirs under current dir are deleted successfully, and
       // it is not the root dir, then task will try to delete it.
       if (result && !root) {
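The rewritten compute() normalizes a null directory listing to an empty list right after the FSUtils calls, then branches on isEmpty(): the "files" and "subdirs" delete actions (and the fork of child CleanerTasks) only run when there is something to clean, and the final result is the conjunction of the two outcomes. A small standalone sketch of that normalization pattern, not HBase code:

import java.util.Collections;
import java.util.List;

public class ListingNormalizationSketch {
  // Mirrors 'files = tmpFiles == null ? Collections.<FileStatus>emptyList() : tmpFiles;'
  static <T> List<T> orEmpty(List<T> raw) {
    return raw == null ? Collections.<T>emptyList() : raw;
  }

  public static void main(String[] args) {
    List<String> files = orEmpty(null); // directory vanished under us: treat as empty
    boolean allFilesDeleted = true;
    if (!files.isEmpty()) {
      allFilesDeleted = deleteAll(files); // only pay for the delete action when needed
    }
    boolean allSubdirsDeleted = true; // same shape for subdirectories
    boolean result = allFilesDeleted && allSubdirsDeleted;
    System.out.println("result = " + result);
  }

  static boolean deleteAll(List<String> files) {
    return true; // stand-in for checkAndDeleteFiles(files)
  }
}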
hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
@@ -38,8 +38,8 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.StoppableImplementation;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
@@ -52,16 +52,17 @@ public class TestCleanerChore {
   private static final Log LOG = LogFactory.getLog(TestCleanerChore.class);
   private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
 
-  @Before
-  public void setup() throws Exception {
+  @BeforeClass
+  public static void setup() {
     CleanerChore.initChorePool(UTIL.getConfiguration());
   }
 
-  @After
-  public void cleanup() throws Exception {
+  @AfterClass
+  public static void cleanup() throws Exception {
     // delete and recreate the test directory, ensuring a clean test dir between tests
     UTIL.cleanupTestDir();
-  }
+    CleanerChore.shutDownChorePool();
+  }
 
 
   @Test
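Since the pool is now shared by all cleaner chores, the test creates it once per class and shuts it down once, instead of once per test method, mirroring the master's init-at-start / shutdown-at-stop lifecycle. A generic JUnit 4 sketch of that per-class pattern, not TestCleanerChore itself:

import static org.junit.Assert.assertEquals;

import java.util.concurrent.Callable;
import java.util.concurrent.ForkJoinPool;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

public class SharedPoolLifecycleTest {
  private static ForkJoinPool pool;

  @BeforeClass
  public static void setup() {
    pool = new ForkJoinPool(2); // created once for the whole test class
  }

  @AfterClass
  public static void cleanup() {
    pool.shutdownNow(); // torn down once, after the last test method
  }

  @Test
  public void poolIsUsableAcrossTests() throws Exception {
    Integer result = pool.submit(new Callable<Integer>() {
      @Override
      public Integer call() {
        return 21 + 21;
      }
    }).get();
    assertEquals(42, result.intValue());
  }
}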
@@ -296,6 +297,7 @@ public class TestCleanerChore {
    */
   @Test
   public void testNoExceptionFromDirectoryWithRacyChildren() throws Exception {
+    UTIL.cleanupTestDir();
     Stoppable stop = new StoppableImplementation();
     // need to use a localutil to not break the rest of the test that runs on the local FS, which
     // gets hosed when we start to use a minicluster.