HADOOP-12358. Add -safely flag to rm to prompt when deleting many files. Contributed by Xiaoyu Yao.

(cherry picked from commit e1feaf6db0)
Andrew Wang 2015-09-04 13:42:55 -07:00
parent 67bce1e827
commit 355398e441
18 changed files with 275 additions and 35 deletions
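
For orientation: the new flag rides on the ordinary FsShell entry point, so it can be driven programmatically as well as from the command line (hadoop fs -rm -r -safely <src>). A minimal sketch, not part of the commit; the path and the limit override below are illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.util.ToolRunner;

public class RmSafelyExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Illustrative override of the limit this commit introduces (default 100).
    conf.setLong("hadoop.shell.safely.delete.limit.num.files", 50);
    // Equivalent to: hadoop fs -rm -r -safely /user/alice/big-dir
    int rc = ToolRunner.run(new FsShell(conf),
        new String[] {"-rm", "-r", "-safely", "/user/alice/big-dir"});
    System.exit(rc);
  }
}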

View File

@@ -255,6 +255,9 @@ Release 2.8.0 - UNRELEASED
HADOOP-12325. RPC Metrics : Add the ability to track and log slow RPCs.
(Anu Engineer via xyao)
HADOOP-12358. Add -safely flag to rm to prompt when deleting many files.
(xyao via wang)
OPTIMIZATIONS
HADOOP-11785. Reduce the number of listStatus operation in distcp

View File

@@ -394,5 +394,11 @@ public class CommonConfigurationKeysPublic {
"hadoop.shell.missing.defaultFs.warning";
public static final boolean HADOOP_SHELL_MISSING_DEFAULT_FS_WARNING_DEFAULT =
false;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES =
"hadoop.shell.safely.delete.limit.num.files";
public static final long HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES_DEFAULT =
100;
}

View File

@@ -25,6 +25,7 @@ import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.PathIOException;
import org.apache.hadoop.fs.PathIsDirectoryException;
@@ -32,9 +33,13 @@ import org.apache.hadoop.fs.PathIsNotDirectoryException;
import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.fs.PathNotFoundException;
import org.apache.hadoop.fs.Trash;
import org.apache.hadoop.util.ToolRunner;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES_DEFAULT;
/**
* Classes that delete paths
* Classes that delete paths.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
@@ -50,28 +55,36 @@ class Delete {
/** remove non-directory paths */
public static class Rm extends FsCommand {
public static final String NAME = "rm";
public static final String USAGE = "[-f] [-r|-R] [-skipTrash] <src> ...";
public static final String USAGE = "[-f] [-r|-R] [-skipTrash] " +
"[-safely] <src> ...";
public static final String DESCRIPTION =
"Delete all files that match the specified file pattern. " +
"Equivalent to the Unix command \"rm <src>\"\n" +
"-skipTrash: option bypasses trash, if enabled, and immediately " +
"deletes <src>\n" +
"-f: If the file does not exist, do not display a diagnostic " +
"message or modify the exit status to reflect an error.\n" +
"-[rR]: Recursively deletes directories";
"Delete all files that match the specified file pattern. " +
"Equivalent to the Unix command \"rm <src>\"\n" +
"-f: If the file does not exist, do not display a diagnostic " +
"message or modify the exit status to reflect an error.\n" +
"-[rR]: Recursively deletes directories.\n" +
"-skipTrash: option bypasses trash, if enabled, and immediately " +
"deletes <src>.\n" +
"-safely: option requires safety confirmationif enabled, " +
"requires confirmation before deleting large directory with more " +
"than <hadoop.shell.delete.limit.num.files> files. Delay is " +
"expected when walking over large directory recursively to count " +
"the number of files to be deleted before the confirmation.\n";
private boolean skipTrash = false;
private boolean deleteDirs = false;
private boolean ignoreFNF = false;
private boolean safeDelete = false;
@Override
protected void processOptions(LinkedList<String> args) throws IOException {
CommandFormat cf = new CommandFormat(
1, Integer.MAX_VALUE, "f", "r", "R", "skipTrash");
1, Integer.MAX_VALUE, "f", "r", "R", "skipTrash", "safely");
cf.parse(args);
ignoreFNF = cf.getOpt("f");
deleteDirs = cf.getOpt("r") || cf.getOpt("R");
skipTrash = cf.getOpt("skipTrash");
safeDelete = cf.getOpt("safely");
}
@Override
@@ -102,7 +115,7 @@ class Delete {
// problem (i.e. creating the trash dir, moving the item to be deleted,
// etc.), then the path will just be deleted because moveToTrash returns
// false and it falls through to fs.delete. This doesn't seem right.
if (moveToTrash(item)) {
if (moveToTrash(item) || !canBeSafelyDeleted(item)) {
return;
}
if (!item.fs.delete(item.path, deleteDirs)) {
@@ -111,6 +124,28 @@ class Delete {
out.println("Deleted " + item);
}
private boolean canBeSafelyDeleted(PathData item)
throws IOException {
boolean shouldDelete = true;
if (safeDelete) {
final long deleteLimit = getConf().getLong(
HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES,
HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES_DEFAULT);
if (deleteLimit > 0) {
ContentSummary cs = item.fs.getContentSummary(item.path);
final long numFiles = cs.getFileCount();
if (numFiles > deleteLimit) {
if (!ToolRunner.confirmPrompt("Proceed deleting " + numFiles +
" files?")) {
System.err.println("Delete aborted at user request.\n");
shouldDelete = false;
}
}
}
}
return shouldDelete;
}
private boolean moveToTrash(PathData item) throws IOException {
boolean success = false;
if (!skipTrash) {
@@ -122,7 +157,7 @@ class Delete {
String msg = ioe.getMessage();
if (ioe.getCause() != null) {
msg += ": " + ioe.getCause().getMessage();
}
throw new IOException(msg + ". Consider using -skipTrash option", ioe);
}
}
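
The guard above amounts to one ContentSummary walk plus a console prompt. A standalone sketch of the same logic, using only the public FileSystem and ToolRunner APIs; the class name and argument handling are illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.ToolRunner;

public class SafeDeleteSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path target = new Path(args[0]);
    long limit = conf.getLong(
        "hadoop.shell.safely.delete.limit.num.files", 100);
    // getContentSummary walks the whole subtree, hence the documented delay.
    ContentSummary cs = fs.getContentSummary(target);
    if (limit > 0 && cs.getFileCount() > limit
        && !ToolRunner.confirmPrompt(
            "Proceed deleting " + cs.getFileCount() + " files?")) {
      System.err.println("Delete aborted at user request.");
      return;
    }
    fs.delete(target, true); // recursive delete, as with -rm -r
  }
}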

View File

@@ -1964,4 +1964,15 @@ for ldap providers in the same way as above does.
<name>hadoop.shell.missing.defaultFs.warning</name>
<value>false</value>
</property>
<property>
<name>hadoop.shell.safely.delete.limit.num.files</name>
<value>100</value>
<description>Used by the -safely option of the hadoop fs shell -rm command to
avoid accidental deletion of large directories. When enabled, the -rm command
requires confirmation if the number of files to be deleted is greater than
this limit. The default limit is 100 files. The warning is disabled if
the limit is 0 or the -safely option is not specified in the -rm command.
</description>
</property>
</configuration>
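
As the description says, a limit of 0 disables the check entirely (the deleteLimit > 0 guard in Delete.java above). For automation that must never block on a prompt, a two-line sketch of the override, assuming conf is the Configuration later handed to FsShell:

// 0 turns the -safely prompt off; any positive value re-enables it at that threshold.
conf.setLong("hadoop.shell.safely.delete.limit.num.files", 0);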

View File

@@ -42,7 +42,7 @@ public class TestCLI extends CLITestHelper {
@Override
protected CommandExecutor.Result execute(CLICommand cmd) throws Exception {
return cmd.getExecutor("").executeCommand(cmd.getCmd());
return cmd.getExecutor("", conf).executeCommand(cmd.getCmd());
}

View File

@@ -17,11 +17,14 @@
*/
package org.apache.hadoop.cli.util;
import org.apache.hadoop.conf.Configuration;
/**
* This interface generalizes the types of test commands for upstream projects.
*/
public interface CLICommand {
public CommandExecutor getExecutor(String tag) throws IllegalArgumentException;
public CommandExecutor getExecutor(String tag, Configuration conf)
throws IllegalArgumentException;
public CLICommandTypes getType();
public String getCmd();
@Override

View File

@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.cli.util;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
/**
@@ -32,9 +33,10 @@ public class CLITestCmd implements CLICommand {
}
@Override
public CommandExecutor getExecutor(String tag) throws IllegalArgumentException {
public CommandExecutor getExecutor(String tag, Configuration conf)
throws IllegalArgumentException {
if (getType() instanceof CLICommandFS)
return new FSCmdExecutor(tag, new FsShell());
return new FSCmdExecutor(tag, new FsShell(conf));
throw new
IllegalArgumentException("Unknown type of test command: " + getType());
}
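
With a Configuration threaded through, the harness can make the FsShell under test see test-specific settings such as the new delete limit. A sketch of a caller, assuming the CLICommandFS type from the same test utilities; the tag and command text are illustrative:

import org.apache.hadoop.cli.util.CLICommand;
import org.apache.hadoop.cli.util.CLICommandFS;
import org.apache.hadoop.cli.util.CLITestCmd;
import org.apache.hadoop.cli.util.CommandExecutor;
import org.apache.hadoop.conf.Configuration;

public class HarnessSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.setLong("hadoop.shell.safely.delete.limit.num.files", 5);
    CLICommand cmd = new CLITestCmd("-rm -r -safely /dir0", new CLICommandFS());
    // The executor now constructs FsShell(conf) rather than FsShell().
    CommandExecutor.Result result =
        cmd.getExecutor("", conf).executeCommand(cmd.getCmd());
    System.out.println(result.getCommandOutput());
  }
}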

View File

@@ -391,7 +391,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
<expected-output>^-rm \[-f\] \[-r\|-R\] \[-skipTrash\] &lt;src&gt; \.\.\. :\s*</expected-output>
<expected-output>^-rm \[-f\] \[-r\|-R\] \[-skipTrash\] \[-safely\] &lt;src&gt; \.\.\. :\s*</expected-output>
</comparator>
<comparator>
<type>RegexpComparator</type>
@@ -403,7 +403,7 @@
</comparator>
<comparator>
<type>RegexpComparator</type>
<expected-output>^\s*-skipTrash\s+option bypasses trash, if enabled, and immediately deletes &lt;src&gt;( )*</expected-output>
<expected-output>^\s*-skipTrash\s+option bypasses trash, if enabled, and immediately deletes &lt;src&gt;\.( )*</expected-output>
</comparator>
<comparator>
<type>RegexpComparator</type>
@@ -415,7 +415,7 @@
</comparator>
<comparator>
<type>RegexpComparator</type>
<expected-output>^\s+-\[rR\]\s+Recursively deletes directories\s*</expected-output>
<expected-output>^\s+-\[rR\]\s+Recursively deletes directories\.\s*</expected-output>
</comparator>
</comparators>
</test>

View File

@@ -22,6 +22,7 @@ import org.apache.hadoop.cli.util.CLICommandTypes;
import org.apache.hadoop.cli.util.CLITestCmd;
import org.apache.hadoop.cli.util.CommandExecutor;
import org.apache.hadoop.cli.util.FSCmdExecutor;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.tools.DFSAdmin;
public class CLITestCmdDFS extends CLITestCmd {
@@ -30,9 +31,10 @@ public class CLITestCmdDFS extends CLITestCmd {
}
@Override
public CommandExecutor getExecutor(String tag) throws IllegalArgumentException {
public CommandExecutor getExecutor(String tag, Configuration conf)
throws IllegalArgumentException {
if (getType() instanceof CLICommandDFSAdmin)
return new FSCmdExecutor(tag, new DFSAdmin());
return super.getExecutor(tag);
return new FSCmdExecutor(tag, new DFSAdmin(conf));
return super.getExecutor(tag, conf);
}
}

View File

@@ -73,7 +73,7 @@ public class TestAclCLI extends CLITestHelperDFS {
@Override
protected Result execute(CLICommand cmd) throws Exception {
return cmd.getExecutor(namenode).executeCommand(cmd.getCmd());
return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
}
@Test

View File

@@ -29,6 +29,7 @@ import org.apache.hadoop.cli.util.CLITestCmd;
import org.apache.hadoop.cli.util.CacheAdminCmdExecutor;
import org.apache.hadoop.cli.util.CommandExecutor;
import org.apache.hadoop.cli.util.CommandExecutor.Result;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DistributedFileSystem;
@@ -119,18 +120,18 @@ public class TestCacheAdminCLI extends CLITestHelper {
}
@Override
public CommandExecutor getExecutor(String tag)
public CommandExecutor getExecutor(String tag, Configuration conf)
throws IllegalArgumentException {
if (getType() instanceof CLICommandCacheAdmin) {
return new CacheAdminCmdExecutor(tag, new CacheAdmin(conf));
}
return super.getExecutor(tag);
return super.getExecutor(tag, conf);
}
}
@Override
protected Result execute(CLICommand cmd) throws Exception {
return cmd.getExecutor("").executeCommand(cmd.getCmd());
return cmd.getExecutor("", conf).executeCommand(cmd.getCmd());
}
@Test

View File

@@ -149,18 +149,18 @@ public class TestCryptoAdminCLI extends CLITestHelperDFS {
}
@Override
public CommandExecutor getExecutor(String tag)
public CommandExecutor getExecutor(String tag, Configuration conf)
throws IllegalArgumentException {
if (getType() instanceof CLICommandCryptoAdmin) {
return new CryptoAdminCmdExecutor(tag, new CryptoAdmin(conf));
}
return super.getExecutor(tag);
return super.getExecutor(tag, conf);
}
}
@Override
protected Result execute(CLICommand cmd) throws Exception {
return cmd.getExecutor(namenode).executeCommand(cmd.getCmd());
return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
}
@Test

View File

@@ -0,0 +1,92 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.cli;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.cli.util.CLICommand;
import org.apache.hadoop.cli.util.CommandExecutor.Result;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class TestDeleteCLI extends CLITestHelperDFS {
protected MiniDFSCluster dfsCluster = null;
protected FileSystem fs = null;
protected String namenode = null;
@Before
@Override
public void setUp() throws Exception {
super.setUp();
conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
conf.setLong(CommonConfigurationKeysPublic.
HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES, 5);
dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
dfsCluster.waitClusterUp();
namenode = conf.get(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "file:///");
fs = dfsCluster.getFileSystem();
assertTrue("Not an HDFS: " + fs.getUri(),
fs instanceof DistributedFileSystem);
}
@After
@Override
public void tearDown() throws Exception {
if (fs != null) {
fs.close();
}
if (dfsCluster != null) {
dfsCluster.shutdown();
}
Thread.sleep(2000);
super.tearDown();
}
@Override
protected String getTestFile() {
return "testDeleteConf.xml";
}
@Override
protected String expandCommand(final String cmd) {
String expCmd = cmd;
expCmd = expCmd.replaceAll("NAMENODE", namenode);
expCmd = super.expandCommand(expCmd);
return expCmd;
}
@Override
protected Result execute(CLICommand cmd) throws Exception {
return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
}
@Test
@Override
public void testAll() {
super.testAll();
}
}

View File

@@ -47,7 +47,7 @@ public class TestHDFSCLI extends CLITestHelperDFS {
// Many of the tests expect a replication value of 1 in the output
conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
// Build racks and hosts configuration to test dfsAdmin -printTopology
String [] racks = {"/rack1", "/rack1", "/rack2", "/rack2",
"/rack2", "/rack3", "/rack4", "/rack4" };
@@ -95,7 +95,7 @@ public class TestHDFSCLI extends CLITestHelperDFS {
@Override
protected Result execute(CLICommand cmd) throws Exception {
return cmd.getExecutor(namenode).executeCommand(cmd.getCmd());
return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
}
@Test

View File

@@ -87,7 +87,7 @@ public class TestXAttrCLI extends CLITestHelperDFS {
@Override
protected Result execute(CLICommand cmd) throws Exception {
return cmd.getExecutor(namenode).executeCommand(cmd.getCmd());
return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
}
@Test

View File

@@ -275,7 +275,8 @@ public class TestStorageRestore {
String cmd = "-fs NAMENODE -restoreFailedStorage false";
String namenode = config.get(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "file:///");
CommandExecutor executor =
new CLITestCmdDFS(cmd, new CLICommandDFSAdmin()).getExecutor(namenode);
new CLITestCmdDFS(cmd,
new CLICommandDFSAdmin()).getExecutor(namenode, config);
executor.executeCommand(cmd);
restore = fsi.getStorage().getRestoreFailedStorage();

View File

@@ -0,0 +1,83 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="testConf.xsl"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<configuration>
<!-- Normal mode is test. To run just the commands and dump the output
to the log, set it to nocompare -->
<mode>test</mode>
<!-- Comparator types:
ExactComparator
SubstringComparator
RegexpComparator
TokenComparator
-->
<tests>
<test> <!-- TESTED -->
<description>rm -r directory that meets the warning criteria when -safely is not used</description>
<test-commands>
<command>-fs NAMENODE -mkdir /dir0</command>
<command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data15bytes /dir0/data15bytes</command>
<command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data30bytes /dir0/data30bytes</command>
<command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data60bytes /dir0/data60bytes</command>
<command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data120bytes /dir0/data120bytes</command>
<command>-fs NAMENODE -mkdir /dir0/dir00</command>
<command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data15bytes /dir0/dir00/data15bytes</command>
<command>-fs NAMENODE -mkdir /dir0/dir01</command>
<command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data30bytes /dir0/dir01/data30bytes</command>
<command>-fs NAMENODE -ls /dir0</command>
<command>-fs NAMENODE -rm -r /dir0</command>
</test-commands>
<cleanup-commands>
<command>-fs NAMENODE -rm -r /dir0</command>
</cleanup-commands>
<comparators>
<comparator>
<type>RegexpComparator</type>
<expected-output>Deleted /dir0</expected-output>
</comparator>
</comparators>
</test>
<test> <!-- TESTED -->
<description>rm -r directory that does not meet the warning criteria when -safely is used</description>
<test-commands>
<command>-fs NAMENODE -mkdir /dir0</command>
<command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data15bytes /dir0/data15bytes</command>
<command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data30bytes /dir0/data30bytes</command>
<command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data60bytes /dir0/data60bytes</command>
<command>-fs NAMENODE -mkdir /dir0/dir00</command>
<command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data15bytes /dir0/dir00/data15bytes</command>
<command>-fs NAMENODE -mkdir /dir0/dir01</command>
<command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data30bytes /dir0/dir01/data30bytes</command>
<command>-fs NAMENODE -ls /dir0</command>
<command>-fs NAMENODE -rm -r -safely /dir0</command>
</test-commands>
<cleanup-commands>
<command>-fs NAMENODE -rm -r /dir0</command>
</cleanup-commands>
<comparators>
<comparator>
<type>RegexpComparator</type>
<expected-output>Deleted /dir0</expected-output>
</comparator>
</comparators>
</test>
</tests>
</configuration>
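
A note on the arithmetic: TestDeleteCLI above sets hadoop.shell.safely.delete.limit.num.files to 5; the first directory holds six files (four directly under /dir0 plus one each in dir00 and dir01), while the second holds five. So the first test shows that without -safely the threshold is never consulted, and the second shows that -safely stays silent at or below the limit. A prompt-triggering run can also be scripted headlessly; a sketch, not part of the test suite, assuming ToolRunner.confirmPrompt reads its Y/N answer from System.in:

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.util.ToolRunner;

public class PromptedDeleteSketch {
  public static void main(String[] args) throws Exception {
    // Feed "n" to the confirmation so the delete is aborted.
    System.setIn(new ByteArrayInputStream("n\n".getBytes(StandardCharsets.UTF_8)));
    Configuration conf = new Configuration();
    conf.setLong("hadoop.shell.safely.delete.limit.num.files", 5);
    // With more than five files under /dir0 this prints
    // "Proceed deleting <n> files?" and then aborts on the "n" answer.
    ToolRunner.run(new FsShell(conf),
        new String[] {"-rm", "-r", "-safely", "/dir0"});
  }
}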

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.cli;
import org.apache.hadoop.cli.util.CLICommandTypes;
import org.apache.hadoop.cli.util.CLITestCmd;
import org.apache.hadoop.cli.util.CommandExecutor;
import org.apache.hadoop.conf.Configuration;
public class CLITestCmdMR extends CLITestCmd {
public CLITestCmdMR(String str, CLICommandTypes type) {
@@ -34,7 +35,7 @@ public class CLITestCmdMR extends CLITestCmd {
* of the test method.
*/
@Override
public CommandExecutor getExecutor(String tag)
public CommandExecutor getExecutor(String tag, Configuration conf)
throws IllegalArgumentException {
throw new IllegalArgumentException("Method isn't supported");
}