HADOOP-8146. FsShell commands cannot be interrupted. Contributed by Daryn Sharp

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1298976 13f79535-47bb-0310-9956-ffa450edef68
Uma Maheswara Rao G 2012-03-09 18:48:35 +00:00
parent 4001b95cf2
commit 3c373405e0
3 changed files with 68 additions and 0 deletions

hadoop-common-project/hadoop-common/CHANGES.txt

@@ -59,6 +59,9 @@ Trunk (unreleased changes)
  BUG FIXES
    HADOOP-8146. FsShell commands cannot be interrupted
    (Daryn Sharp via Uma Maheswara Rao G)
    HADOOP-8018. Hudson auto test for HDFS has started throwing javadoc
    (Jon Eagles via bobby)

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.fs.shell;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.PrintStream;
import java.lang.reflect.Field;
import java.util.ArrayList;
@@ -152,6 +153,9 @@ abstract public class Command extends Configured {
      }
      processOptions(args);
      processRawArguments(args);
    } catch (CommandInterruptException e) {
      displayError("Interrupted");
      return 130;
    } catch (IOException e) {
      displayError(e);
    }
@@ -349,6 +353,10 @@ abstract public class Command extends Configured {
  public void displayError(Exception e) {
    // build up a list of exceptions that occurred
    exceptions.add(e);
    // use runtime so it rips up through the stack and exits out
    if (e instanceof InterruptedIOException) {
      throw new CommandInterruptException();
    }
    String errorMessage = e.getLocalizedMessage();
    if (errorMessage == null) {
@@ -454,4 +462,7 @@ abstract public class Command extends Configured {
    }
    return value;
  }
  @SuppressWarnings("serial")
  static class CommandInterruptException extends RuntimeException {}
}
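Note: a minimal, self-contained sketch (not part of this patch) of the pattern the Command.java hunks above introduce: displayError() turns an InterruptedIOException into an unchecked CommandInterruptException so it unwinds the recursive path traversal, and run() catches it and returns 130 (the conventional 128 + SIGINT exit status). The class and method names below are illustrative stand-ins, not Hadoop APIs.

import java.io.InterruptedIOException;

public class InterruptPatternSketch {
  // stand-in for Command.CommandInterruptException
  static class CommandInterrupt extends RuntimeException {}

  static void displayError(Exception e) {
    // rethrow unchecked so the interrupt rips up through the processing stack
    if (e instanceof InterruptedIOException) {
      throw new CommandInterrupt();
    }
    System.err.println("error: " + e.getLocalizedMessage());
  }

  static int run(Runnable body) {
    try {
      body.run();
    } catch (CommandInterrupt e) {
      System.err.println("Interrupted");
      return 130;  // 128 + SIGINT, so callers can tell "cancelled" from "failed"
    }
    return 0;
  }

  public static void main(String[] args) {
    int rc = run(new Runnable() {
      @Override
      public void run() {
        // simulate blocking I/O being interrupted mid-processing
        displayError(new InterruptedIOException());
      }
    });
    System.out.println("exit code: " + rc);  // prints 130
  }
}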

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java

@@ -23,6 +23,7 @@ import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.PrintStream;
import java.util.Collections;
import java.util.HashMap;
@@ -33,6 +34,9 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ftpserver.command.impl.STAT;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.shell.CommandFactory;
import org.apache.hadoop.fs.shell.FsCommand;
import org.apache.hadoop.fs.shell.PathData;
import org.apache.hadoop.io.IOUtils;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY;
import org.junit.BeforeClass;
@@ -331,6 +335,33 @@ public class TestFsShellReturnCode {
  }
  @Test
  public void testInterrupt() throws Exception {
    MyFsShell shell = new MyFsShell();
    shell.setConf(new Configuration());
    final Path d = new Path(TEST_ROOT_DIR, "testInterrupt");
    final Path f1 = new Path(d, "f1");
    final Path f2 = new Path(d, "f2");
    assertTrue(fileSys.mkdirs(d));
    writeFile(fileSys, f1);
    assertTrue(fileSys.isFile(f1));
    writeFile(fileSys, f2);
    assertTrue(fileSys.isFile(f2));
    int exitCode = shell.run(
        new String[]{ "-testInterrupt", f1.toString(), f2.toString() });
    // processing a file throws an interrupt, it should blow on first file
    assertEquals(1, InterruptCommand.processed);
    assertEquals(130, exitCode);
    exitCode = shell.run(
        new String[]{ "-testInterrupt", d.toString() });
    // processing a file throws an interrupt, it should blow on file
    // after descent into dir
    assertEquals(2, InterruptCommand.processed);
    assertEquals(130, exitCode);
  }
  static class LocalFileSystemExtn extends LocalFileSystem {
    public LocalFileSystemExtn() {
      super(new RawLocalFileSystemExtn());
@@ -379,4 +410,27 @@ public class TestFsShellReturnCode {
      return stat;
    }
  }
  static class MyFsShell extends FsShell {
    protected void registerCommands(CommandFactory factory) {
      factory.addClass(InterruptCommand.class, "-testInterrupt");
    }
  }
  static class InterruptCommand extends FsCommand {
    static int processed = 0;
    InterruptCommand() {
      processed = 0;
      setRecursive(true);
    }
    @Override
    protected void processPath(PathData item) throws IOException {
      System.out.println("processing: "+item);
      processed++;
      if (item.stat.isFile()) {
        System.out.println("throw interrupt");
        throw new InterruptedIOException();
      }
    }
  }
}
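Note: with this fix, callers that drive FsShell programmatically (or scripts checking "hadoop fs" exit statuses) can distinguish an interrupted command from an ordinary failure by testing for exit code 130. A hedged caller-side sketch, not part of the patch; the "-ls /" arguments and messages are illustrative only:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.util.ToolRunner;

public class FsShellExitCodeCheck {
  public static void main(String[] args) throws Exception {
    // run an FsShell command through the standard Tool entry point
    int rc = ToolRunner.run(new Configuration(), new FsShell(), new String[] { "-ls", "/" });
    if (rc == 130) {
      // the InterruptedIOException / Ctrl-C path introduced by this patch
      System.err.println("fs command was interrupted");
    } else if (rc != 0) {
      System.err.println("fs command failed, exit code " + rc);
    }
  }
}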