HDFS-11263. ClassCastException when we use Bzipcodec for Fsimage compression. Contributed by Brahma Reddy Battula.

(cherry picked from commit 1b401f6a73)
Brahma Reddy Battula 2016-12-20 20:54:03 +05:30
parent a95acbcb6e
commit 7a92345f16
2 changed files with 11 additions and 4 deletions
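Background on the failure, for context: the fsimage saver in FSImageFormatProtobuf wraps its section output in codec.createOutputStream(...), whose declared return type is CompressionOutputStream. DefaultCodec and GzipCodec happen to hand back CompressorStream subclasses, but BZip2Codec's pure-Java stream extends CompressionOutputStream directly, so the old narrowing cast to CompressorStream throws ClassCastException once dfs.image.compression.codec is set to BZip2Codec. A minimal standalone sketch of the mismatch (not part of the patch; the class name BZip2CastRepro is invented for illustration):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.CompressorStream;
import org.apache.hadoop.util.ReflectionUtils;

public class BZip2CastRepro {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    BZip2Codec codec = ReflectionUtils.newInstance(BZip2Codec.class, conf);

    // createOutputStream() is declared to return CompressionOutputStream;
    // with the pure-Java bzip2 implementation the concrete stream class
    // does not extend CompressorStream.
    CompressionOutputStream out =
        codec.createOutputStream(new ByteArrayOutputStream());
    out.write("fsimage section".getBytes(StandardCharsets.UTF_8));

    System.out.println(out instanceof CompressorStream); // false for bzip2
    // ((CompressorStream) out).finish();  // the old cast: ClassCastException here
    out.finish();                          // the fix: finish() is on the base class
    out.close();
  }
}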

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java

@@ -40,6 +40,7 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
+import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -63,7 +64,6 @@ import org.apache.hadoop.hdfs.server.namenode.startupprogress.StepType;
 import org.apache.hadoop.hdfs.util.MD5FileUtils;
 import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.io.compress.CompressorStream;
 import org.apache.hadoop.util.LimitInputStream;
 import org.apache.hadoop.util.Time;
@@ -413,7 +413,7 @@ public final class FSImageFormatProtobuf {
     private void flushSectionOutputStream() throws IOException {
       if (codec != null) {
-        ((CompressorStream) sectionOutputStream).finish();
+        ((CompressionOutputStream) sectionOutputStream).finish();
       }
       sectionOutputStream.flush();
     }
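The fix casts to the type that createOutputStream() already declares, CompressionOutputStream, which is where finish() is defined, so flushing a section works regardless of which codec is configured; the subsequent sectionOutputStream.flush() is unchanged.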

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImage.java

@@ -65,8 +65,15 @@ public class TestFSImage {
   @Test
   public void testCompression() throws IOException {
     Configuration conf = new Configuration();
     conf.setBoolean(DFSConfigKeys.DFS_IMAGE_COMPRESS_KEY, true);
-    conf.set(DFSConfigKeys.DFS_IMAGE_COMPRESSION_CODEC_KEY,
-        "org.apache.hadoop.io.compress.GzipCodec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.DefaultCodec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.GzipCodec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.BZip2Codec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.Lz4Codec");
+  }
+
+  private void setCompressCodec(Configuration conf, String compressCodec)
+      throws IOException {
+    conf.set(DFSConfigKeys.DFS_IMAGE_COMPRESSION_CODEC_KEY, compressCodec);
     testPersistHelper(conf);
   }
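For completeness, a sketch of the configuration under which the original ClassCastException was hit, i.e. fsimage compression with BZip2. The two keys are the same ones the test sets (dfs.image.compress and dfs.image.compression.codec in hdfs-site.xml); the helper class name FsImageBZip2Conf is invented for illustration:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;

public class FsImageBZip2Conf {
  // Builds a Configuration equivalent to setting dfs.image.compress=true and
  // dfs.image.compression.codec=org.apache.hadoop.io.compress.BZip2Codec.
  public static Configuration create() {
    Configuration conf = new Configuration();
    conf.setBoolean(DFSConfigKeys.DFS_IMAGE_COMPRESS_KEY, true);
    conf.set(DFSConfigKeys.DFS_IMAGE_COMPRESSION_CODEC_KEY,
        "org.apache.hadoop.io.compress.BZip2Codec");
    return conf;
  }
}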