diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 17ee336772a..5961d34911c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -428,6 +428,9 @@ Release 2.8.0 - UNRELEASED HDFS-7728. Avoid updating quota usage while loading edits. (Jing Zhao via wheat9) + HDFS-8150. Make getFileChecksum fail for blocks under construction + (J.Andreina via vinayakumarb) + Release 2.7.1 - UNRELEASED INCOMPATIBLE CHANGES diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java index 216c8431867..60c53b02d22 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java @@ -1871,6 +1871,10 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory, if (null == blockLocations) { throw new FileNotFoundException("File does not exist: " + src); } + if (blockLocations.isUnderConstruction()) { + throw new IOException("Fail to get checksum, since file " + src + + " is under construction."); + } List locatedblocks = blockLocations.getLocatedBlocks(); final DataOutputBuffer md5out = new DataOutputBuffer(); int bytesPerCRC = -1; @@ -1890,6 +1894,10 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory, if (null == blockLocations) { throw new FileNotFoundException("File does not exist: " + src); } + if (blockLocations.isUnderConstruction()) { + throw new IOException("Fail to get checksum, since file " + src + + " is under construction."); + } locatedblocks = blockLocations.getLocatedBlocks(); refetchBlocks = false; } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestGetFileChecksum.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestGetFileChecksum.java index 0e56ba7f05e..814261f7fe7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestGetFileChecksum.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestGetFileChecksum.java @@ -17,7 +17,12 @@ */ package org.apache.hadoop.hdfs; +import static org.junit.Assert.fail; + +import java.io.IOException; + import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileChecksum; import org.apache.hadoop.fs.Path; import org.junit.After; @@ -67,6 +72,20 @@ public class TestGetFileChecksum { } } + @Test + public void testGetFileChecksumForBlocksUnderConstruction() { + try { + FSDataOutputStream file = dfs.create(new Path("/testFile")); + file.write("Performance Testing".getBytes()); + dfs.getFileChecksum(new Path("/testFile")); + fail("getFileChecksum should fail for files " + + "with blocks under construction"); + } catch (IOException ie) { + Assert.assertTrue(ie.getMessage().contains( + "Fail to get checksum, since file /testFile " + + "is under construction.")); + } + } @Test public void testGetFileChecksum() throws Exception { testGetFileChecksum(new Path("/foo"), BLOCKSIZE / 4); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotFileLength.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotFileLength.java index d53140f14ac..90e499d8a35 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotFileLength.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotFileLength.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hdfs.server.namenode.snapshot; import java.io.ByteArrayOutputStream; +import 
java.io.IOException; import java.io.PrintStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -32,6 +33,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; @@ -111,8 +114,16 @@ public class TestSnapshotFileLength { // Append to the file. FSDataOutputStream out = hdfs.append(file1); // Nothing has been appended yet. All checksums should still be equal. - assertThat("file and snapshot checksums (open for append) are not equal", - hdfs.getFileChecksum(file1), is(snapChksum1)); + // HDFS-8150: Fetching checksum for file under construction should fail + try { + hdfs.getFileChecksum(file1); + fail("getFileChecksum should fail for files " + + "with blocks under construction"); + } catch (IOException ie) { + assertTrue(ie.getMessage().contains( + "Fail to get checksum, since file " + file1 + + " is under construction.")); + } assertThat("snapshot checksum (post-open for append) has changed", hdfs.getFileChecksum(file1snap1), is(snapChksum1)); try { @@ -122,8 +133,6 @@ public class TestSnapshotFileLength { assertThat("Wrong data size in snapshot.", dataFromSnapshot.length, is(origLen)); // Verify that checksum didn't change - assertThat("snapshot file checksum (pre-close) has changed", - hdfs.getFileChecksum(file1), is(snapChksum1)); assertThat("snapshot checksum (post-append) has changed", hdfs.getFileChecksum(file1snap1), is(snapChksum1)); } finally {