From e990904dd568a1d8f98efb55c1dd2d598ae4752b Mon Sep 17 00:00:00 2001
From: Arpit Agarwal
Date: Fri, 26 Jan 2018 11:42:27 -0800
Subject: [PATCH] HDFS-13054. Handling PathIsNotEmptyDirectoryException in
 DFSClient delete call. Contributed by Nanda kumar.

---
 .../java/org/apache/hadoop/hdfs/DFSClient.java     |  4 +++-
 .../hadoop/hdfs/protocol/ClientProtocol.java       |  3 +++
 .../hadoop/hdfs/TestDistributedFileSystem.java     | 17 +++++++++++++++++
 3 files changed, 23 insertions(+), 1 deletion(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index f0769c12373..92bb99ee9d7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -82,6 +82,7 @@ import org.apache.hadoop.fs.Options;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
 import org.apache.hadoop.fs.QuotaUsage;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.StorageType;
@@ -1620,7 +1621,8 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
           FileNotFoundException.class,
           SafeModeException.class,
           UnresolvedPathException.class,
-          SnapshotAccessControlException.class);
+          SnapshotAccessControlException.class,
+          PathIsNotEmptyDirectoryException.class);
     }
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
index fbef0373614..0d77037d9d8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
 import org.apache.hadoop.hdfs.AddBlockFlag;
 import org.apache.hadoop.fs.CacheFlag;
 import org.apache.hadoop.fs.ContentSummary;
@@ -625,6 +626,8 @@ public interface ClientProtocol {
    * @throws org.apache.hadoop.fs.UnresolvedLinkException If src
    *           contains a symlink
    * @throws SnapshotAccessControlException if path is in RO snapshot
+   * @throws PathIsNotEmptyDirectoryException if path is a non-empty directory
+   *           and recursive is set to false
    * @throws IOException If an I/O error occurred
    */
   @AtMostOnce
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
index 823c747a050..072ee9fcfd1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
@@ -67,6 +67,7 @@ import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.StorageStatistics.LongStatistic;
 import org.apache.hadoop.fs.StorageType;
@@ -571,6 +572,22 @@ public class TestDistributedFileSystem {
       in.close();
       fs.close();
     }
+
+    {
+      // Test PathIsNotEmptyDirectoryException while deleting non-empty dir
+      FileSystem fs = cluster.getFileSystem();
+      fs.mkdirs(new Path("/test/nonEmptyDir"));
+      fs.create(new Path("/test/nonEmptyDir/emptyFile")).close();
+      try {
+        fs.delete(new Path("/test/nonEmptyDir"), false);
+        Assert.fail("Expecting PathIsNotEmptyDirectoryException");
+      } catch (PathIsNotEmptyDirectoryException ex) {
+        // Expected: the directory is non-empty and recursive is false.
+      }
+      Assert.assertTrue(fs.exists(new Path("/test/nonEmptyDir")));
+      fs.delete(new Path("/test/nonEmptyDir"), true);
+    }
+
   } finally {
     if (cluster != null) {cluster.shutdown();}
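
For context only, a minimal caller-side sketch of what this patch means for client code; it is not part of the patch. The class name and path below are made up for illustration. The intent of the change is that a non-recursive delete of a non-empty directory now surfaces PathIsNotEmptyDirectoryException to the caller (rather than leaving it wrapped in a RemoteException), so it can be caught explicitly:

// Illustration only, not part of the patch. Hypothetical caller; assumes
// fs.defaultFS points at an HDFS cluster and the path is made up.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;

public class NonRecursiveDeleteExample {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    Path dir = new Path("/example/nonEmptyDir");
    try {
      fs.delete(dir, false);  // recursive = false
    } catch (PathIsNotEmptyDirectoryException e) {
      // The directory still has children; decide explicitly what to do
      // instead of silently falling back to a recursive delete.
      System.err.println("Not deleting non-empty directory: " + dir);
    } finally {
      fs.close();
    }
  }
}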