From 54c40cbf49f2ebf4bbc1976279a6eba7a2c5fe23 Mon Sep 17 00:00:00 2001
From: Ayush Saxena
Date: Sat, 17 Oct 2020 01:34:01 +0530
Subject: [PATCH] HADOOP-16878. FileUtil.copy() to throw IOException if the
 source and destination are the same (#2383)

Contributed by Gabor Bota.
---
 .../main/java/org/apache/hadoop/fs/FileUtil.java |  6 ++++++
 .../org/apache/hadoop/fs/TestFsShellCopy.java    | 16 +++++++++++++++-
 .../hadoop/hdfs/TestDistributedFileSystem.java   | 16 ++++++++++++++++
 3 files changed, 37 insertions(+), 1 deletion(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
index 7bc93f9bf5d..73ca6e65216 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -398,6 +398,12 @@ public class FileUtil {
                              Configuration conf) throws IOException {
     Path src = srcStatus.getPath();
     dst = checkDest(src.getName(), dstFS, dst, overwrite);
+
+    if (srcFS.makeQualified(src).equals(dstFS.makeQualified(dst))) {
+      throw new PathOperationException("Source (" + src + ") and destination "
+          + "(" + dst + ") are equal in the copy command.");
+    }
+
     if (srcStatus.isDirectory()) {
       checkDependencies(srcFS, src, dstFS, dst);
       if (!dstFS.mkdirs(dst)) {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
index 72ae296c957..117fad2924d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
@@ -34,6 +34,7 @@ import java.io.PrintStream;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -175,7 +176,20 @@ public class TestFsShellCopy {
     checkPut(dirPath, targetDir, true);
   }
 
-  
+  @Test
+  public void testCopyBetweenFsEqualPath() throws Exception {
+    Path testRoot = new Path(testRootDir, "testPutFile");
+    lfs.delete(testRoot, true);
+    lfs.mkdirs(testRoot);
+
+    Path filePath = new Path(testRoot, "sameSourceTarget");
+    lfs.create(filePath).close();
+    final FileStatus status = lfs.getFileStatus(filePath);
+    LambdaTestUtils.intercept(PathOperationException.class, () ->
+        FileUtil.copy(lfs, status, lfs, filePath, false, true, conf)
+    );
+  }
+
   private void checkPut(Path srcPath, Path targetDir, boolean useWindowsPath)
       throws Exception {
     lfs.delete(targetDir, true);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
index 628a618f4fe..3c903563f4d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
@@ -65,6 +65,7 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileSystem.Statistics.StatisticsData;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
@@ -75,6 +76,7 @@ import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
 import org.apache.hadoop.fs.Options.ChecksumOpt;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
+import org.apache.hadoop.fs.PathOperationException;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.StorageStatistics.LongStatistic;
 import org.apache.hadoop.fs.StorageType;
@@ -2090,4 +2092,18 @@ public class TestDistributedFileSystem {
       assertFalse(result.isSupported());
     }
   }
+
+  @Test
+  public void testCopyBetweenFsEqualPath() throws Exception {
+    Configuration conf = getTestConfiguration();
+    try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build()) {
+      cluster.waitActive();
+      final DistributedFileSystem dfs = cluster.getFileSystem();
+      Path filePath = new Path("/dir/file");
+      dfs.create(filePath).close();
+      FileStatus fstatus = dfs.getFileStatus(filePath);
+      LambdaTestUtils.intercept(PathOperationException.class,
+          () -> FileUtil.copy(dfs, fstatus, dfs, filePath, false, true, conf));
+    }
+  }
 }
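
A minimal caller-side sketch of the behavior this patch introduces, assuming
hadoop-common is on the classpath; the class name, the use of the local
filesystem, and the /tmp/sameSourceTarget path are illustrative assumptions,
not part of the patch. With the new check in FileUtil.copy(), a copy whose
qualified source and destination resolve to the same path fails fast with
PathOperationException (an IOException subclass), which callers can catch:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathOperationException;

public class SameSourceTargetCopyExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);       // local FS, for illustration only
    Path file = new Path("/tmp/sameSourceTarget");   // illustrative path
    fs.create(file).close();
    FileStatus status = fs.getFileStatus(file);
    try {
      // Source and destination qualify to the same path, so the copy is rejected.
      FileUtil.copy(fs, status, fs, file, false, true, conf);
    } catch (PathOperationException e) {
      // Message: "Source (...) and destination (...) are equal in the copy command."
      System.out.println("copy rejected: " + e.getMessage());
    } finally {
      fs.delete(file, true);
    }
  }
}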