From 355398e4413c0c8799fdab357e2380f6ec69d55c Mon Sep 17 00:00:00 2001
From: Andrew Wang
Date: Fri, 4 Sep 2015 13:42:55 -0700
Subject: [PATCH] HADOOP-12358. Add -safely flag to rm to prompt when deleting
 many files. Contributed by Xiaoyu Yao.

(cherry picked from commit e1feaf6db03451068c660a863926032b35a569f8)
---
 .../hadoop-common/CHANGES.txt                 |  3 +
 .../fs/CommonConfigurationKeysPublic.java     |  6 ++
 .../org/apache/hadoop/fs/shell/Delete.java    | 61 +++++++++---
 .../src/main/resources/core-default.xml       | 11 +++
 .../java/org/apache/hadoop/cli/TestCLI.java   |  2 +-
 .../apache/hadoop/cli/util/CLICommand.java    |  5 +-
 .../apache/hadoop/cli/util/CLITestCmd.java    |  6 +-
 .../src/test/resources/testConf.xml           |  6 +-
 .../org/apache/hadoop/cli/CLITestCmdDFS.java  |  8 +-
 .../org/apache/hadoop/cli/TestAclCLI.java     |  2 +-
 .../apache/hadoop/cli/TestCacheAdminCLI.java  |  7 +-
 .../apache/hadoop/cli/TestCryptoAdminCLI.java |  6 +-
 .../org/apache/hadoop/cli/TestDeleteCLI.java  | 92 +++++++++++++++++++
 .../org/apache/hadoop/cli/TestHDFSCLI.java    |  4 +-
 .../org/apache/hadoop/cli/TestXAttrCLI.java   |  2 +-
 .../server/namenode/TestStorageRestore.java   |  3 +-
 .../src/test/resources/testDeleteConf.xml     | 83 +++++++++++++++++
 .../org/apache/hadoop/cli/CLITestCmdMR.java   |  3 +-
 18 files changed, 275 insertions(+), 35 deletions(-)
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestDeleteCLI.java
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testDeleteConf.xml

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 769e5569a2f..5f7ddc22a95 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -255,6 +255,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12325. RPC Metrics : Add the ability track and log slow RPCs.
     (Anu Engineer via xyao)
 
+    HADOOP-12358. Add -safely flag to rm to prompt when deleting many files.
+    (xyao via wang)
+
   OPTIMIZATIONS
 
    HADOOP-11785. Reduce the number of listStatus operation in distcp

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index 65acf9981bb..ccb49c3c13a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -394,5 +394,11 @@ public class CommonConfigurationKeysPublic {
       "hadoop.shell.missing.defaultFs.warning";
   public static final boolean HADOOP_SHELL_MISSING_DEFAULT_FS_WARNING_DEFAULT =
       false;
+
+  /** See core-default.xml */
+  public static final String HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES =
+      "hadoop.shell.safely.delete.limit.num.files";
+  public static final long HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES_DEFAULT =
+      100;
 }
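For context, the new key is read like any other long-valued Configuration setting. A minimal sketch of a consumer, assuming only the two constants added above (the demo class itself is hypothetical and not part of the patch):

import org.apache.hadoop.conf.Configuration;

import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES_DEFAULT;

// Hypothetical demo class, not part of the patch.
public class SafeDeleteLimitDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Resolves to 100 (the default above) unless core-site.xml overrides it;
    // a value of 0 disables the -safely confirmation entirely.
    long limit = conf.getLong(HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES,
        HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES_DEFAULT);
    System.out.println("safely-delete limit: " + limit);
  }
}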
" + + "Equivalent to the Unix command \"rm \"\n" + + "-f: If the file does not exist, do not display a diagnostic " + + "message or modify the exit status to reflect an error.\n" + + "-[rR]: Recursively deletes directories.\n" + + "-skipTrash: option bypasses trash, if enabled, and immediately " + + "deletes .\n" + + "-safely: option requires safety confirmation,if enabled, " + + "requires confirmation before deleting large directory with more " + + "than files. Delay is " + + "expected when walking over large directory recursively to count " + + "the number of files to be deleted before the confirmation.\n"; private boolean skipTrash = false; private boolean deleteDirs = false; private boolean ignoreFNF = false; - + private boolean safeDelete = false; + @Override protected void processOptions(LinkedList args) throws IOException { CommandFormat cf = new CommandFormat( - 1, Integer.MAX_VALUE, "f", "r", "R", "skipTrash"); + 1, Integer.MAX_VALUE, "f", "r", "R", "skipTrash", "safely"); cf.parse(args); ignoreFNF = cf.getOpt("f"); deleteDirs = cf.getOpt("r") || cf.getOpt("R"); skipTrash = cf.getOpt("skipTrash"); + safeDelete = cf.getOpt("safely"); } @Override @@ -102,7 +115,7 @@ class Delete { // problem (ie. creating the trash dir, moving the item to be deleted, // etc), then the path will just be deleted because moveToTrash returns // false and it falls thru to fs.delete. this doesn't seem right - if (moveToTrash(item)) { + if (moveToTrash(item) || !canBeSafelyDeleted(item)) { return; } if (!item.fs.delete(item.path, deleteDirs)) { @@ -111,6 +124,28 @@ class Delete { out.println("Deleted " + item); } + private boolean canBeSafelyDeleted(PathData item) + throws IOException { + boolean shouldDelete = true; + if (safeDelete) { + final long deleteLimit = getConf().getLong( + HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES, + HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES_DEFAULT); + if (deleteLimit > 0) { + ContentSummary cs = item.fs.getContentSummary(item.path); + final long numFiles = cs.getFileCount(); + if (numFiles > deleteLimit) { + if (!ToolRunner.confirmPrompt("Proceed deleting " + numFiles + + " files?")) { + System.err.println("Delete aborted at user request.\n"); + shouldDelete = false; + } + } + } + } + return shouldDelete; + } + private boolean moveToTrash(PathData item) throws IOException { boolean success = false; if (!skipTrash) { @@ -122,7 +157,7 @@ class Delete { String msg = ioe.getMessage(); if (ioe.getCause() != null) { msg += ": " + ioe.getCause().getMessage(); - } + } throw new IOException(msg + ". Consider using -skipTrash option", ioe); } } diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml index ca33cf585f4..b5d646347ae 100644 --- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml +++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml @@ -1964,4 +1964,15 @@ for ldap providers in the same way as above does. hadoop.shell.missing.defaultFs.warning false + + + hadoop.shell.safely.delete.limit.num.files + 100 + Used by -safely option of hadoop fs shell -rm command to avoid + accidental deletion of large directories. When enabled, the -rm command + requires confirmation if the number of files to be deleted is greater than + this limit. The default limit is 100 files. The warning is disabled if + the limit is 0 or the -safely is not specified in -rm command. 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/TestCLI.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/TestCLI.java
index c3503882f51..e1514ff1372 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/TestCLI.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/TestCLI.java
@@ -42,7 +42,7 @@ public class TestCLI extends CLITestHelper {
 
   @Override
   protected CommandExecutor.Result execute(CLICommand cmd) throws Exception {
-    return cmd.getExecutor("").executeCommand(cmd.getCmd());
+    return cmd.getExecutor("", conf).executeCommand(cmd.getCmd());
   }
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java
index 50cb3a53c52..8823f5c3832 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java
@@ -17,11 +17,14 @@
  */
 package org.apache.hadoop.cli.util;
 
+import org.apache.hadoop.conf.Configuration;
+
 /**
  * This interface is to generalize types of test command for upstream projects
  */
 public interface CLICommand {
-  public CommandExecutor getExecutor(String tag) throws IllegalArgumentException;
+  public CommandExecutor getExecutor(String tag, Configuration conf)
+      throws IllegalArgumentException;
   public CLICommandTypes getType();
   public String getCmd();
   @Override
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java
index 602a07f3d58..d912fadd3f2 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.cli.util;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FsShell;
 
 /**
@@ -32,9 +33,10 @@ public class CLITestCmd implements CLICommand {
   }
 
   @Override
-  public CommandExecutor getExecutor(String tag) throws IllegalArgumentException {
+  public CommandExecutor getExecutor(String tag, Configuration conf)
+      throws IllegalArgumentException {
     if (getType() instanceof CLICommandFS)
-      return new FSCmdExecutor(tag, new FsShell());
+      return new FSCmdExecutor(tag, new FsShell(conf));
     throw new IllegalArgumentException("Unknown type of test command: " + getType());
   }
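The getExecutor() signature change above threads each test's Configuration through to FsShell, which previously built its own Configuration and therefore could not observe per-test settings. A sketch of a caller using the new two-argument form; the driver class and command string are illustrative:

import org.apache.hadoop.cli.util.CLICommandFS;
import org.apache.hadoop.cli.util.CLITestCmd;
import org.apache.hadoop.cli.util.CommandExecutor;
import org.apache.hadoop.conf.Configuration;

// Hypothetical driver, not part of the patch.
public class ExecutorDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // A per-test setting FsShell could not see before this change.
    conf.setLong("hadoop.shell.safely.delete.limit.num.files", 5);
    CLITestCmd cmd =
        new CLITestCmd("-rm -r -safely /dir0", new CLICommandFS());
    // getExecutor now receives the Configuration and hands it to new FsShell(conf).
    CommandExecutor.Result result =
        cmd.getExecutor("", conf).executeCommand(cmd.getCmd());
    System.out.println("exit code: " + result.getExitCode());
  }
}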
diff --git a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
index 8f48ccc1e2f..1f050be41f8 100644
--- a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
+++ b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
@@ -391,7 +391,7 @@
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^-rm \[-f\] \[-r\|-R\] \[-skipTrash\] &lt;src&gt; \.\.\. :\s*</expected-output>
+          <expected-output>^-rm \[-f\] \[-r\|-R\] \[-skipTrash\] \[-safely\] &lt;src&gt; \.\.\. :\s*</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
@@ -403,7 +403,7 @@
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^\s*-skipTrash\s+option bypasses trash, if enabled, and immediately deletes &lt;src&gt;( )*</expected-output>
+          <expected-output>^\s*-skipTrash\s+option bypasses trash, if enabled, and immediately deletes &lt;src&gt;\.( )*</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
@@ -415,7 +415,7 @@
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^\s+-\[rR\]\s+Recursively deletes directories\s*</expected-output>
+          <expected-output>^\s+-\[rR\]\s+Recursively deletes directories\.\s*</expected-output>
         </comparator>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdDFS.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdDFS.java
index 89932cc67cc..992e8fee1ad 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdDFS.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdDFS.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.cli.util.CLICommandTypes;
 import org.apache.hadoop.cli.util.CLITestCmd;
 import org.apache.hadoop.cli.util.CommandExecutor;
 import org.apache.hadoop.cli.util.FSCmdExecutor;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
 
 public class CLITestCmdDFS extends CLITestCmd {
@@ -30,9 +31,10 @@ public class CLITestCmdDFS extends CLITestCmd {
   }
 
   @Override
-  public CommandExecutor getExecutor(String tag) throws IllegalArgumentException {
+  public CommandExecutor getExecutor(String tag, Configuration conf)
+      throws IllegalArgumentException {
     if (getType() instanceof CLICommandDFSAdmin)
-      return new FSCmdExecutor(tag, new DFSAdmin());
-    return super.getExecutor(tag);
+      return new FSCmdExecutor(tag, new DFSAdmin(conf));
+    return super.getExecutor(tag, conf);
   }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestAclCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestAclCLI.java
index 02207e60535..a6f8651b355 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestAclCLI.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestAclCLI.java
@@ -73,7 +73,7 @@ public class TestAclCLI extends CLITestHelperDFS {
 
   @Override
   protected Result execute(CLICommand cmd) throws Exception {
-    return cmd.getExecutor(namenode).executeCommand(cmd.getCmd());
+    return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
   }
 
   @Test
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java
index f25c4fe01ab..ddb11b6cf0f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.cli.util.CLITestCmd;
 import org.apache.hadoop.cli.util.CacheAdminCmdExecutor;
 import org.apache.hadoop.cli.util.CommandExecutor;
 import org.apache.hadoop.cli.util.CommandExecutor.Result;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
@@ -119,18 +120,18 @@ public class TestCacheAdminCLI extends CLITestHelper {
     }
 
     @Override
-    public CommandExecutor getExecutor(String tag)
+    public CommandExecutor getExecutor(String tag, Configuration conf)
         throws IllegalArgumentException {
       if (getType() instanceof CLICommandCacheAdmin) {
         return new CacheAdminCmdExecutor(tag, new CacheAdmin(conf));
       }
-      return super.getExecutor(tag);
+      return super.getExecutor(tag, conf);
     }
   }
 
   @Override
   protected Result execute(CLICommand cmd) throws Exception {
-    return cmd.getExecutor("").executeCommand(cmd.getCmd());
+    return cmd.getExecutor("", conf).executeCommand(cmd.getCmd());
   }
 
   @Test
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
index 1c870a2801d..44e662fac97 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
@@ -149,18 +149,18 @@ public class TestCryptoAdminCLI extends CLITestHelperDFS {
     }
 
     @Override
-    public CommandExecutor getExecutor(String tag)
+    public CommandExecutor getExecutor(String tag, Configuration conf)
         throws IllegalArgumentException {
       if (getType() instanceof CLICommandCryptoAdmin) {
         return new CryptoAdminCmdExecutor(tag, new CryptoAdmin(conf));
       }
-      return super.getExecutor(tag);
+      return super.getExecutor(tag, conf);
     }
   }
 
   @Override
   protected Result execute(CLICommand cmd) throws Exception {
-    return cmd.getExecutor(namenode).executeCommand(cmd.getCmd());
+    return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
   }
 
   @Test
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestDeleteCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestDeleteCLI.java
new file mode 100644
index 00000000000..04328f52800
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestDeleteCLI.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.cli;
+
+import static org.junit.Assert.assertTrue;
+
+import org.apache.hadoop.cli.util.CLICommand;
+import org.apache.hadoop.cli.util.CommandExecutor.Result;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestDeleteCLI extends CLITestHelperDFS {
+  protected MiniDFSCluster dfsCluster = null;
+  protected FileSystem fs = null;
+  protected String namenode = null;
+
+  @Before
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
+    conf.setLong(CommonConfigurationKeysPublic.
+        HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES, 5);
+
+    dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
+    dfsCluster.waitClusterUp();
+    namenode = conf.get(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "file:///");
+
+    fs = dfsCluster.getFileSystem();
+    assertTrue("Not an HDFS: " + fs.getUri(),
+        fs instanceof DistributedFileSystem);
+  }
+
+  @After
+  @Override
+  public void tearDown() throws Exception {
+    if (fs != null) {
+      fs.close();
+    }
+    if (dfsCluster != null) {
+      dfsCluster.shutdown();
+    }
+    Thread.sleep(2000);
+    super.tearDown();
+  }
+
+  @Override
+  protected String getTestFile() {
+    return "testDeleteConf.xml";
+  }
+
+  @Override
+  protected String expandCommand(final String cmd) {
+    String expCmd = cmd;
+    expCmd = expCmd.replaceAll("NAMENODE", namenode);
+    expCmd = super.expandCommand(expCmd);
+    return expCmd;
+  }
+
+  @Override
+  protected Result execute(CLICommand cmd) throws Exception {
+    return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
+  }
+
+  @Test
+  @Override
+  public void testAll () {
+    super.testAll();
+  }
+}
\ No newline at end of file
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestHDFSCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestHDFSCLI.java
index c34834847c8..36307262874 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestHDFSCLI.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestHDFSCLI.java
@@ -47,7 +47,7 @@ public class TestHDFSCLI extends CLITestHelperDFS {
 
     // Many of the tests expect a replication value of 1 in the output
     conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
-    
+
     // Build racks and hosts configuration to test dfsAdmin -printTopology
     String [] racks =  {"/rack1", "/rack1", "/rack2", "/rack2",
                         "/rack2", "/rack3", "/rack4", "/rack4" };
@@ -95,7 +95,7 @@ public class TestHDFSCLI extends CLITestHelperDFS {
 
   @Override
   protected Result execute(CLICommand cmd) throws Exception {
-    return cmd.getExecutor(namenode).executeCommand(cmd.getCmd());
+    return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
   }
 
   @Test
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestXAttrCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestXAttrCLI.java
index ce107ef56c1..45c7909c17c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestXAttrCLI.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestXAttrCLI.java
@@ -87,7 +87,7 @@ public class TestXAttrCLI extends CLITestHelperDFS {
 
   @Override
   protected Result execute(CLICommand cmd) throws Exception {
-    return cmd.getExecutor(namenode).executeCommand(cmd.getCmd());
+    return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
   }
 
   @Test
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java
index 1a612e83ec7..6f4546db884 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java
@@ -275,7 +275,8 @@ public class TestStorageRestore {
     String cmd = "-fs NAMENODE -restoreFailedStorage false";
     String namenode = config.get(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "file:///");
     CommandExecutor executor =
-        new CLITestCmdDFS(cmd, new CLICommandDFSAdmin()).getExecutor(namenode);
+        new CLITestCmdDFS(cmd,
+            new CLICommandDFSAdmin()).getExecutor(namenode, config);
     executor.executeCommand(cmd);
 
     restore = fsi.getStorage().getRestoreFailedStorage();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testDeleteConf.xml b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testDeleteConf.xml
new file mode 100644
index 00000000000..87019830c46
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testDeleteConf.xml
@@ -0,0 +1,83 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="testConf.xsl"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<configuration>
+  <mode>test</mode>
+
+  <tests>
+    <test>
+      <description>rm -r directory that meets warning criteria when -safely is not used</description>
+      <test-commands>
+        <command>-fs NAMENODE -mkdir /dir0</command>
+        <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data15bytes /dir0/data15bytes</command>
+        <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data30bytes /dir0/data30bytes</command>
+        <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data60bytes /dir0/data60bytes</command>
+        <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data120bytes /dir0/data120bytes</command>
+        <command>-fs NAMENODE -mkdir /dir0/dir00</command>
+        <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data15bytes /dir0/dir00/data15bytes</command>
+        <command>-fs NAMENODE -mkdir /dir0/dir01</command>
+        <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data30bytes /dir0/dir01/data30bytes</command>
+        <command>-fs NAMENODE -ls /dir0</command>
+        <command>-fs NAMENODE -rm -r /dir0</command>
+      </test-commands>
+      <cleanup-commands>
+        <command>-fs NAMENODE -rm -r /dir0</command>
+      </cleanup-commands>
+      <comparators>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>Deleted /dir0</expected-output>
+        </comparator>
+      </comparators>
+    </test>
+
+    <test>
+      <description>rm -r directory that does not meet warning criteria when -safely is used</description>
+      <test-commands>
+        <command>-fs NAMENODE -mkdir /dir0</command>
+        <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data15bytes /dir0/data15bytes</command>
+        <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data30bytes /dir0/data30bytes</command>
+        <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data60bytes /dir0/data60bytes</command>
+        <command>-fs NAMENODE -mkdir /dir0/dir00</command>
+        <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data15bytes /dir0/dir00/data15bytes</command>
+        <command>-fs NAMENODE -mkdir /dir0/dir01</command>
+        <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data30bytes /dir0/dir01/data30bytes</command>
+        <command>-fs NAMENODE -ls /dir0</command>
+        <command>-fs NAMENODE -rm -r -safely /dir0</command>
+      </test-commands>
+      <cleanup-commands>
+        <command>-fs NAMENODE -rm -r /dir0</command>
+      </cleanup-commands>
+      <comparators>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>Deleted /dir0</expected-output>
+        </comparator>
+      </comparators>
+    </test>
+  </tests>
+</configuration>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/cli/CLITestCmdMR.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/cli/CLITestCmdMR.java
index f9bc9430af8..f4ab3dc5b92 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/cli/CLITestCmdMR.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/cli/CLITestCmdMR.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.cli;
 import org.apache.hadoop.cli.util.CLICommandTypes;
 import org.apache.hadoop.cli.util.CLITestCmd;
 import org.apache.hadoop.cli.util.CommandExecutor;
+import org.apache.hadoop.conf.Configuration;
 
 public class CLITestCmdMR extends CLITestCmd {
   public CLITestCmdMR(String str, CLICommandTypes type) {
@@ -34,7 +35,7 @@ public class CLITestCmdMR extends CLITestCmd {
    * of the test method.
    */
   @Override
-  public CommandExecutor getExecutor(String tag)
+  public CommandExecutor getExecutor(String tag, Configuration conf)
       throws IllegalArgumentException {
     throw new IllegalArgumentException("Method isn't supported");
   }
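As a closing illustration, the new flag can be exercised end to end by driving FsShell programmatically, much as the new CLI tests do. The path and the lowered limit here are illustrative only:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.util.ToolRunner;

// Hypothetical demo: with the limit lowered to 5, removing a directory that
// holds more than 5 files prints "Proceed deleting N files?" and waits for
// a y/n answer on stdin before anything is deleted.
public class RmSafelyDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.setLong("hadoop.shell.safely.delete.limit.num.files", 5);
    int exitCode = ToolRunner.run(conf, new FsShell(),
        new String[] {"-rm", "-r", "-safely", "/dir0"});
    System.exit(exitCode);
  }
}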