diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 6017c4363fd..b7270676314 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -301,6 +301,9 @@ Release 2.0.3-alpha - 2013-02-06
     HADOOP-9278. Fix the file handle leak in HarMetaData.parseMetaData() in
     HarFileSystem. (Chris Nauroth via szetszwo)
 
+    HADOOP-9289. FsShell rm -f fails for non-matching globs. (Daryn Sharp via
+    suresh)
+
 Release 2.0.2-alpha - 2012-09-07
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
index 4dc550501c8..ed190d37461 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.fs.shell;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.LinkedList;
+import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -28,6 +29,7 @@ import org.apache.hadoop.fs.PathIOException;
 import org.apache.hadoop.fs.PathIsDirectoryException;
 import org.apache.hadoop.fs.PathIsNotDirectoryException;
 import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
+import org.apache.hadoop.fs.PathNotFoundException;
 import org.apache.hadoop.fs.Trash;
 
 /**
@@ -71,6 +73,19 @@ class Delete {
       skipTrash = cf.getOpt("skipTrash");
     }
 
+    @Override
+    protected List expandArgument(String arg) throws IOException {
+      try {
+        return super.expandArgument(arg);
+      } catch (PathNotFoundException e) {
+        if (!ignoreFNF) {
+          throw e;
+        }
+        // prevent -f on a non-existent glob from failing
+        return new LinkedList();
+      }
+    }
+
     @Override
     protected void processNonexistentPath(PathData item) throws IOException {
       if (!ignoreFNF) super.processNonexistentPath(item);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
index c4cc64790c9..4bb67a0786c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
@@ -299,6 +299,46 @@ public class TestFsShellReturnCode {
     }
   }
 
+  @Test
+  public void testRmWithNonexistentGlob() throws Exception {
+    Configuration conf = new Configuration();
+    FsShell shell = new FsShell();
+    shell.setConf(conf);
+    final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+    final PrintStream err = new PrintStream(bytes);
+    final PrintStream oldErr = System.err;
+    System.setErr(err);
+    final String results;
+    try {
+      int exit = shell.run(new String[]{"-rm", "nomatch*"});
+      assertEquals(1, exit);
+      results = bytes.toString();
+      assertTrue(results.contains("rm: `nomatch*': No such file or directory"));
+    } finally {
+      IOUtils.closeStream(err);
+      System.setErr(oldErr);
+    }
+  }
+
+  @Test
+  public void testRmForceWithNonexistentGlob() throws Exception {
+    Configuration conf = new Configuration();
+    FsShell shell = new FsShell();
+    shell.setConf(conf);
+    final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+    final PrintStream err = new PrintStream(bytes);
+    final PrintStream oldErr = System.err;
+    System.setErr(err);
+    try {
+      int exit = shell.run(new String[]{"-rm", "-f", "nomatch*"});
+      assertEquals(0, exit);
+      assertTrue(bytes.toString().isEmpty());
+    } finally {
+      IOUtils.closeStream(err);
+      System.setErr(oldErr);
+    }
+  }
+
   @Test
   public void testInvalidDefaultFS() throws Exception {
     // if default fs doesn't exist or is invalid, but the path provided in