From 3d9ad8e3b60dd21db45466f4736abe6b1812b522 Mon Sep 17 00:00:00 2001
From: Jing Zhao
Date: Thu, 30 Jan 2014 19:15:13 +0000
Subject: [PATCH] HDFS-5843. DFSClient.getFileChecksum() throws IOException if
 checksum is disabled. Contributed by Laurent Goujon.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1562927 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt        |  3 +++
 .../hadoop/hdfs/server/datanode/DataXceiver.java   |  5 +++--
 .../org/apache/hadoop/hdfs/TestFSOutputSummer.java | 13 +++++++++++--
 3 files changed, 17 insertions(+), 4 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index d665a41a288..0f0054fde97 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -304,6 +304,9 @@ Release 2.4.0 - UNRELEASED
     HDFS-5492. Port HDFS-2069 (Incorrect default trash interval in the docs)
     to trunk. (Akira Ajisaka via Arpit Agarwal)
 
+    HDFS-5843. DFSClient.getFileChecksum() throws IOException if checksum is
+    disabled. (Laurent Goujon via jing9)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java
index 13b411e9ef6..da38b91ac13 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataXceiver.java
@@ -655,8 +655,9 @@ class DataXceiver extends Receiver implements Runnable {
       final BlockMetadataHeader header = BlockMetadataHeader.readHeader(checksumIn);
       final DataChecksum checksum = header.getChecksum();
       final int bytesPerCRC = checksum.getBytesPerChecksum();
-      final long crcPerBlock = (metadataIn.getLength()
-          - BlockMetadataHeader.getHeaderSize())/checksum.getChecksumSize();
+      final long crcPerBlock = checksum.getChecksumSize() > 0
+          ? (metadataIn.getLength() - BlockMetadataHeader.getHeaderSize())/checksum.getChecksumSize()
+          : 0;
 
       //compute block checksum
       final MD5Hash md5 = MD5Hash.digest(checksumIn);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java
index 88a83715ff0..047f67f380a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFSOutputSummer.java
@@ -71,7 +71,7 @@ public class TestFSOutputSummer {
     cleanupFile(name);
   }
 
-  /* create a file, write data with vairable amount of data */
+  /* create a file, write data with variable amount of data */
   private void writeFile3(Path name) throws Exception {
     FSDataOutputStream stm = fileSys.create(name, true,
         fileSys.getConf().getInt(IO_FILE_BUFFER_SIZE_KEY, 4096),
@@ -103,6 +103,8 @@ public class TestFSOutputSummer {
     stm.readFully(0, actual);
     checkAndEraseData(actual, 0, expected, "Read Sanity Test");
     stm.close();
+    // do a sanity check. Get the file checksum
+    fileSys.getFileChecksum(name);
   }
 
   private void cleanupFile(Path name) throws IOException {
@@ -112,13 +114,20 @@ public class TestFSOutputSummer {
   }
 
   /**
-   * Test write opeation for output stream in DFS.
+   * Test write operation for output stream in DFS.
    */
   @Test
   public void testFSOutputSummer() throws Exception {
+    doTestFSOutputSummer("CRC32");
+    doTestFSOutputSummer("CRC32C");
+    doTestFSOutputSummer("NULL");
+  }
+
+  private void doTestFSOutputSummer(String checksumType) throws Exception {
     Configuration conf = new HdfsConfiguration();
     conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);
     conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, BYTES_PER_CHECKSUM);
+    conf.set(DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY, checksumType);
     MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
         .numDataNodes(NUM_OF_DATANODES)
         .build();
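
Context for the DataXceiver change: when a file is written with the NULL checksum
type, DataChecksum#getChecksumSize() returns 0, so the unguarded division above
divides by zero on the datanode, and the failure reaches the caller of
DFSClient.getFileChecksum() as an IOException. Below is a minimal, standalone
sketch of the before/after arithmetic; it has no Hadoop dependency, and
metaFileLength, headerSize, and checksumSize are illustrative stand-ins for
metadataIn.getLength(), BlockMetadataHeader.getHeaderSize(), and
checksum.getChecksumSize(), with made-up example values in main().

/**
 * Sketch of the crcPerBlock computation fixed in DataXceiver#blockChecksum().
 * All names and constants are illustrative, not the real Hadoop fields.
 */
public class CrcPerBlockSketch {

  /** Pre-patch logic: divides by zero when checksums are disabled (NULL type). */
  static long crcPerBlockOld(long metaFileLength, int headerSize, int checksumSize) {
    return (metaFileLength - headerSize) / checksumSize;
  }

  /** Post-patch logic: guard the division and report 0 CRCs per block instead. */
  static long crcPerBlockNew(long metaFileLength, int headerSize, int checksumSize) {
    return checksumSize > 0
        ? (metaFileLength - headerSize) / checksumSize
        : 0;
  }

  public static void main(String[] args) {
    // CRC32/CRC32C store 4 bytes per checksum: a meta file holding a header
    // (7 bytes assumed here for illustration) plus 512 CRCs gives 512 either way.
    System.out.println(crcPerBlockNew(7 + 512 * 4, 7, 4)); // 512
    // A NULL checksum occupies 0 bytes: crcPerBlockOld would throw
    // ArithmeticException here, crcPerBlockNew returns 0.
    System.out.println(crcPerBlockNew(7, 7, 0));           // 0
  }
}

Returning 0 rather than rejecting the request lets the checksum reply still
carry bytesPerCRC and the MD5 digest, so clients writing with checksums
disabled get an answer instead of an error, which is what the new NULL case
in the test exercises end to end.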