From 78af36bb077d53a020746b72cd5d9af12940482e Mon Sep 17 00:00:00 2001
From: Uma Maheswara Rao G
Date: Fri, 22 Nov 2013 16:14:37 +0000
Subject: [PATCH] Merge HDFS-5544. Adding Test case For Checking
 dfs.checksum.type as NULL value. Contributed by Sathish.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1544598 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt  |  2 ++
 .../hadoop/hdfs/TestFSOutputSummer.java      | 21 +++++++++++++++++++
 2 files changed, 23 insertions(+)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 913d49fac8c..e5431d8c2d0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -222,6 +222,8 @@ Release 2.2.1 - UNRELEASED
     HDFS-5344. Make LsSnapshottableDir as Tool interface implementation.
     (Sathish via umamahesh)
 
+    HDFS-5544. Adding Test case For Checking dfs.checksum type as NULL value. (Sathish via umamahesh)
+
   OPTIMIZATIONS
 
   BUG FIXES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java
index a3b3f808eb9..88a83715ff0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java
@@ -135,4 +135,25 @@ public class TestFSOutputSummer {
       cluster.shutdown();
     }
   }
+
+  @Test
+  public void TestDFSCheckSumType() throws Exception{
+    Configuration conf = new HdfsConfiguration();
+    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);
+    conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, BYTES_PER_CHECKSUM);
+    conf.set(DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY, "NULL");
+    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
+                                               .numDataNodes(NUM_OF_DATANODES)
+                                               .build();
+    fileSys = cluster.getFileSystem();
+    try {
+      Path file = new Path("try.dat");
+      Random rand = new Random(seed);
+      rand.nextBytes(expected);
+      writeFile1(file);
+    } finally {
+      fileSys.close();
+      cluster.shutdown();
+    }
+  }
 }
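
Note (not part of the patch): the new test exercises a client configured with dfs.checksum.type set to "NULL", which selects DataChecksum.Type.NULL and disables per-chunk checksums for written data. Below is a minimal standalone sketch of that same client-side setting; the class name, output path, and payload size are illustrative assumptions, not code from this change.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative sketch only; assumes a reachable default FileSystem from the loaded configuration.
public class NullChecksumWriteExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Same key the test sets via DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY; "NULL" means
    // no checksums are computed for the chunks written by this client.
    conf.set("dfs.checksum.type", "NULL");
    FileSystem fs = FileSystem.get(conf);
    Path file = new Path("/tmp/null-checksum-example.dat");   // hypothetical path
    FSDataOutputStream out = fs.create(file);
    try {
      out.write(new byte[4096]);                              // arbitrary payload
    } finally {
      out.close();
      fs.close();
    }
  }
}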