From 1b401f6a734df4e23a79b3bd89c816a1fc0de574 Mon Sep 17 00:00:00 2001
From: Brahma Reddy Battula
Date: Tue, 20 Dec 2016 20:54:03 +0530
Subject: [PATCH] HDFS-11263. ClassCastException when we use Bzipcodec for
 Fsimage compression. Contributed by Brahma Reddy Battula.

---
 .../hdfs/server/namenode/FSImageFormatProtobuf.java |  4 ++--
 .../hadoop/hdfs/server/namenode/TestFSImage.java    | 12 +++++++++---
 2 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
index 7a81f9ecf7f..22331fe0b33 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
@@ -40,6 +40,7 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
+import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -63,7 +64,6 @@ import org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType;
 import org.apache.hadoop.hdfs.util.MD5FileUtils;
 import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.io.compress.CompressorStream;
 import org.apache.hadoop.util.LimitInputStream;
 import org.apache.hadoop.util.Time;
 
@@ -417,7 +417,7 @@ public final class FSImageFormatProtobuf {
 
     private void flushSectionOutputStream() throws IOException {
       if (codec != null) {
-        ((CompressorStream) sectionOutputStream).finish();
+        ((CompressionOutputStream) sectionOutputStream).finish();
       }
       sectionOutputStream.flush();
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImage.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImage.java
index 0d8431d122d..f83927003d5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImage.java
@@ -52,7 +52,6 @@ import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.StripedFileTestUtil;
 import org.apache.hadoop.hdfs.client.HdfsDataOutputStream.SyncFlag;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
@@ -84,8 +83,15 @@ public class TestFSImage {
   public void testCompression() throws IOException {
     Configuration conf = new Configuration();
     conf.setBoolean(DFSConfigKeys.DFS_IMAGE_COMPRESS_KEY, true);
-    conf.set(DFSConfigKeys.DFS_IMAGE_COMPRESSION_CODEC_KEY,
-        "org.apache.hadoop.io.compress.GzipCodec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.DefaultCodec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.GzipCodec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.BZip2Codec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.Lz4Codec");
+  }
+
+  private void setCompressCodec(Configuration conf, String compressCodec)
+      throws IOException {
+    conf.set(DFSConfigKeys.DFS_IMAGE_COMPRESSION_CODEC_KEY, compressCodec);
     testPersistHelper(conf);
   }
 
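Background on the fix: CompressionCodec.createOutputStream() is only declared to return a
CompressionOutputStream, and finish() is defined on that base class. DefaultCodec and GzipCodec
happen to hand back the CompressorStream subclass, while BZip2Codec (at least when the native
bzip2 library is not loaded) returns a BZip2CompressionOutputStream that does not extend
CompressorStream, so the old cast in flushSectionOutputStream() threw ClassCastException when
saving a bzip2-compressed fsimage. The snippet below is a minimal, illustrative sketch of that
difference; the class name CodecCastCheck is made up for the example, and the concrete stream
types printed may vary with the codecs and native libraries available on the machine.

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.CompressorStream;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CodecCastCheck {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    CompressionCodec[] codecs = {
        ReflectionUtils.newInstance(GzipCodec.class, conf),
        ReflectionUtils.newInstance(BZip2Codec.class, conf)
    };
    for (CompressionCodec codec : codecs) {
      // createOutputStream() only promises a CompressionOutputStream;
      // whether it is also a CompressorStream is codec-specific.
      CompressionOutputStream out =
          codec.createOutputStream(new ByteArrayOutputStream());
      System.out.println(codec.getClass().getSimpleName()
          + " -> " + out.getClass().getName()
          + " (CompressorStream? " + (out instanceof CompressorStream) + ")");
      // finish() lives on CompressionOutputStream itself, so the wider cast
      // used by the patch works for every configured codec.
      out.finish();
      out.close();
    }
  }
}

On a machine without the native bzip2 library this should report a stream type for BZip2Codec
that is not a CompressorStream, which is exactly the case the old cast could not handle.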