From e4adb191aabde2262562fe03ced16aabfbc49d98 Mon Sep 17 00:00:00 2001
From: Vinayakumar B
Date: Mon, 29 Jun 2015 15:58:54 +0530
Subject: [PATCH] HADOOP-12119. hadoop fs -expunge does not work for federated
 namespace (Contributed by J.Andreina)

(cherry picked from commit c815344e2e68d78f6587b65bc2db25e151aa4364)
---
 hadoop-common-project/hadoop-common/CHANGES.txt  |  3 +++
 .../java/org/apache/hadoop/fs/shell/Delete.java  | 17 ++++++++++++++---
 .../java/org/apache/hadoop/fs/TestTrash.java     | 14 ++++++++++++--
 3 files changed, 29 insertions(+), 5 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 9ee578502ed..30558f63070 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -395,6 +395,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-8151. Error handling in snappy decompressor throws invalid
     exceptions. (Matt Foley via harsh)
 
+    HADOOP-12119. hadoop fs -expunge does not work for federated namespace
+    (J.Andreina via vinayakumarb)
+
 Release 2.7.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
index f882817e92e..40d94787d3c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
@@ -25,6 +25,7 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.PathIOException;
 import org.apache.hadoop.fs.PathIsDirectoryException;
 import org.apache.hadoop.fs.PathIsNotDirectoryException;
@@ -195,9 +196,19 @@ protected void processOptions(LinkedList<PathData> args) throws IOException {
     @Override
     protected void processArguments(LinkedList<PathData> args)
     throws IOException {
-      Trash trash = new Trash(getConf());
-      trash.expunge();
-      trash.checkpoint();
+      FileSystem[] childFileSystems =
+          FileSystem.get(getConf()).getChildFileSystems();
+      if (null != childFileSystems) {
+        for (FileSystem fs : childFileSystems) {
+          Trash trash = new Trash(fs, getConf());
+          trash.expunge();
+          trash.checkpoint();
+        }
+      } else {
+        Trash trash = new Trash(getConf());
+        trash.expunge();
+        trash.checkpoint();
+      }
     }
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
index a675e30a0a9..9a9173332cf 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
@@ -594,8 +594,18 @@ static class TestLFS extends LocalFileSystem {
     TestLFS() {
       this(new Path(TEST_DIR, "user/test"));
     }
-    TestLFS(Path home) {
-      super();
+    TestLFS(final Path home) {
+      super(new RawLocalFileSystem() {
+        @Override
+        protected Path getInitialWorkingDirectory() {
+          return makeQualified(home);
+        }
+
+        @Override
+        public Path getHomeDirectory() {
+          return makeQualified(home);
+        }
+      });
       this.home = home;
     }
     @Override