diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 93feb433cb5..cad8df4e1ed 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -141,6 +141,9 @@ Release 2.4.0 - UNRELEASED
 
     HADOOP-10422. Remove redundant logging of RPC retry attempts. (cnauroth)
 
+    HADOOP-10425. LocalFileSystem.getContentSummary should not count crc files.
+    (szetszwo)
+
   BREAKDOWN OF HADOOP-10184 SUBTASKS AND RELATED JIRAS
 
     HADOOP-10185. FileSystem API for ACLs. (cnauroth)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
index d45ecbbf11a..f0554252fed 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
@@ -374,11 +374,6 @@ public class FilterFileSystem extends FileSystem {
   }
 
   // path variants delegate to underlying filesystem
-  @Override
-  public ContentSummary getContentSummary(Path f) throws IOException {
-    return fs.getContentSummary(f);
-  }
-
   @Override
   public long getDefaultBlockSize(Path f) {
     return fs.getDefaultBlockSize(f);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
index 0d3be9bac30..e9edcc889fc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
@@ -320,7 +320,7 @@ class ChRootedFileSystem extends FilterFileSystem {
 
   @Override
   public ContentSummary getContentSummary(Path f) throws IOException {
-    return super.getContentSummary(fullPath(f));
+    return fs.getContentSummary(fullPath(f));
   }
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
index 1c61b59a66a..c0a26c4ba93 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
@@ -33,6 +33,7 @@ import static org.junit.Assert.*;
 import static org.junit.Assume.assumeTrue;
 
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -203,12 +204,22 @@ public class TestLocalFileSystem {
   }
 
   @Test(timeout = 1000)
-  public void testMkdirs() throws IOException {
+  public void testCreateFileAndMkdirs() throws IOException {
     Path test_dir = new Path(TEST_ROOT_DIR, "test_dir");
-    Path test_file = new Path(TEST_ROOT_DIR, "file1");
+    Path test_file = new Path(test_dir, "file1");
     assertTrue(fileSys.mkdirs(test_dir));
-    writeFile(fileSys, test_file, 1);
+    final int fileSize = new Random().nextInt(1 << 20) + 1;
+    writeFile(fileSys, test_file, fileSize);
+
+    {
+      //check FileStatus and ContentSummary
+      final FileStatus status = fileSys.getFileStatus(test_file);
+      Assert.assertEquals(fileSize, status.getLen());
+      final ContentSummary summary = fileSys.getContentSummary(test_dir);
+      Assert.assertEquals(fileSize, summary.getLength());
+    }
+
     // creating dir over a file
     Path bad_dir = new Path(test_file, "another_dir");
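
Not part of the patch, for illustration only: a minimal sketch of how the fixed behavior can be exercised against LocalFileSystem. Dropping FilterFileSystem.getContentSummary lets LocalFileSystem fall back to the default FileSystem implementation, which walks ChecksumFileSystem's checksum-filtering listStatus rather than delegating to the raw local filesystem, so the hidden .crc file should no longer inflate the summary. The class name ContentSummaryDemo, the /tmp/summary-demo path, and the 4 KB payload below are arbitrary assumptions, not anything defined by the patch.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/** Hypothetical demo (not in the patch): summarize a local directory. */
public class ContentSummaryDemo {
  public static void main(String[] args) throws IOException {
    // LocalFileSystem writes a hidden ".<name>.crc" next to every file it creates.
    FileSystem local = FileSystem.getLocal(new Configuration());
    Path dir = new Path("/tmp/summary-demo");   // arbitrary scratch directory
    Path file = new Path(dir, "data");

    local.mkdirs(dir);
    FSDataOutputStream out = local.create(file, true);
    try {
      out.write(new byte[4096]);                // 4 KB of user data
    } finally {
      out.close();
    }

    // With HADOOP-10425 the summary reflects only "data" (4096 bytes, 1 file);
    // previously the ".data.crc" checksum file was counted as well.
    ContentSummary summary = local.getContentSummary(dir);
    System.out.println("length    = " + summary.getLength());
    System.out.println("fileCount = " + summary.getFileCount());
  }
}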