diff --git a/hdfs/CHANGES.txt b/hdfs/CHANGES.txt
index 5eb0f326797..1c202211c5d 100644
--- a/hdfs/CHANGES.txt
+++ b/hdfs/CHANGES.txt
@@ -549,6 +549,9 @@ Trunk (unreleased changes)
     HDFS-1977. Stop using StringUtils.stringifyException().
     (Bharath Mundlapudi via jitendra)
+    HDFS-2131. Add new tests for the -overwrite/-f option in put and
+    copyFromLocal by HADOOP-7361. (Uma Maheswara Rao G via szetszwo)
+
   OPTIMIZATIONS
     HDFS-1458. Improve checkpoint performance by avoiding unnecessary image
diff --git a/hdfs/src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml b/hdfs/src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml
index 8b4e6c8e75e..b79c0d077dc 100644
--- a/hdfs/src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml
+++ b/hdfs/src/test/hdfs/org/apache/hadoop/cli/testHDFSConf.xml
@@ -2670,6 +2670,24 @@
+    <test> <!-- TESTED -->
+      <description>cp: putting file into an already existing destination with -f option(absolute path)</description>
+      <test-commands>
+        <command>-fs NAMENODE -touchz /user/file0</command>
+        <command>-fs NAMENODE -cp -f CLITEST_DATA/data120bytes /user/file0</command>
+        <command>-fs NAMENODE -cat /user/file0</command>
+      </test-commands>
+      <cleanup-commands>
+        <command>-fs NAMENODE -rm -r /user</command>
+      </cleanup-commands>
+      <comparators>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>12345678901234</expected-output>
+        </comparator>
+      </comparators>
+    </test>
+
       <description>cp: copying directory to directory in hdfs:// path</description>
@@ -4076,6 +4094,24 @@
+    <test> <!-- TESTED -->
+      <description>put: putting file into an already existing destination with -f option(absolute path)</description>
+      <test-commands>
+        <command>-fs NAMENODE -touchz /user/file0</command>
+        <command>-fs NAMENODE -put -f CLITEST_DATA/data120bytes /user/file0</command>
+        <command>-fs NAMENODE -cat /user/file0</command>
+      </test-commands>
+      <cleanup-commands>
+        <command>-fs NAMENODE -rm -r /user</command>
+      </cleanup-commands>
+      <comparators>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>12345678901234</expected-output>
+        </comparator>
+      </comparators>
+    </test>
+
       <description>put: putting file into an already existing destination(relative path)</description>
@@ -4593,6 +4629,25 @@
+      <description>copyFromLocal: copying file into an already existing destination with -f option(absolute path)</description>
+      <test-commands>
+        <command>-fs NAMENODE -touchz /user/file0</command>
+        <command>-fs NAMENODE -copyFromLocal -f CLITEST_DATA/data120bytes /user/file0</command>
+        <command>-fs NAMENODE -cat /user/file0</command>
+      </test-commands>
+      <cleanup-commands>
+        <command>-fs NAMENODE -rm -r /user</command>
+      </cleanup-commands>
+      <comparators>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>12345678901234</expected-output>
+        </comparator>
+      </comparators>
+    </test>
+
+
+    <test> <!-- TESTED -->
       <description>copyFromLocal: copying file into an already existing destination(relative path)</description>
         <command>-fs NAMENODE -touchz file0</command>
diff --git a/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java b/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java
index f39b428a1d9..d99bf04fcb3 100644
--- a/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java
+++ b/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSShell.java
@@ -1354,4 +1354,73 @@ public void testInvalidShell() throws Exception {
     int res = admin.run(new String[] {"-refreshNodes"});
     assertEquals("expected to fail -1", res , -1);
   }
+
+  // Test the -f (force overwrite) option of put, copyFromLocal and cp
+  public void testCopyCommandsWithForceOption() throws Exception {
+    final int SUCCESS = 0;
+    final int ERROR = 1;
+    Configuration conf = new Configuration();
+    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1)
+        .format(true).build();
+    FsShell shell = null;
+    FileSystem fs = null;
+    File localFile = new File("testFileForPut");
+    Path hdfsTestDir = new Path("ForceTestDir");
+    try {
+      fs = cluster.getFileSystem();
+      fs.mkdirs(hdfsTestDir);
+      localFile.createNewFile();
+      writeFile(fs, new Path("testFileForPut"));
+      shell = new FsShell();
+
+      // Tests for put: -f overwrites, plain put must fail on an existing file
+      String[] argv = new String[] { "-put", "-f", localFile.getName(),
+          "ForceTestDir" };
+      int res = ToolRunner.run(shell, argv);
+      assertEquals("put -f is not working", SUCCESS, res);
+
+      argv = new String[] { "-put", localFile.getName(), "ForceTestDir" };
+      res = ToolRunner.run(shell, argv);
+      assertEquals("put overwrote the existing file without -f", ERROR,
+          res);
+
+      // Tests for copyFromLocal: same expectations as put
+      argv = new String[] { "-copyFromLocal", "-f", localFile.getName(),
+          "ForceTestDir" };
+      res = ToolRunner.run(shell, argv);
+      assertEquals("copyFromLocal -f is not working", SUCCESS, res);
+
+      argv = new String[] { "-copyFromLocal", localFile.getName(),
+          "ForceTestDir" };
+      res = ToolRunner.run(shell, argv);
+      assertEquals(
+          "copyFromLocal overwrote the existing file without -f", ERROR,
+          res);
+
+      // Tests for cp: same expectations as put
+      argv = new String[] { "-cp", "-f", localFile.getName(), "ForceTestDir" };
+      res = ToolRunner.run(shell, argv);
+      assertEquals("cp -f is not working", SUCCESS, res);
+
+      argv = new String[] { "-cp", localFile.getName(),
+          "ForceTestDir" };
+      res = ToolRunner.run(shell, argv);
+      assertEquals("cp overwrote the existing file without -f", ERROR,
+          res);
+    } finally {
+      if (null != shell)
+        shell.close();
+
+      if (localFile.exists())
+        localFile.delete();
+
+      if (null != fs) {
+        fs.delete(hdfsTestDir, true);
+        fs.close();
+      }
+      cluster.shutdown();
+    }
+
+  }
+
}