From d7f712cd4262f51ea2972596ce0a48cde623ecf9 Mon Sep 17 00:00:00 2001
From: Tsz-wo Sze
Date: Tue, 5 Jul 2011 17:28:27 +0000
Subject: [PATCH] HADOOP-7090. Fix resource leaks in s3.INode, BloomMapFile,
 WritableUtils and CBZip2OutputStream. Contributed by Uma Maheswara Rao G

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1143149 13f79535-47bb-0310-9956-ffa450edef68
---
 common/CHANGES.txt                                 |  3 +++
 .../java/org/apache/hadoop/fs/s3/INode.java        | 20 ++++++++++++-------
 .../org/apache/hadoop/io/BloomMapFile.java         | 19 +++++++++++++-----
 .../org/apache/hadoop/io/WritableUtils.java        |  9 +++++++--
 .../io/compress/bzip2/CBZip2OutputStream.java      | 11 ++++++++--
 5 files changed, 46 insertions(+), 16 deletions(-)

diff --git a/common/CHANGES.txt b/common/CHANGES.txt
index 69f961c115e..b46dd5e9af0 100644
--- a/common/CHANGES.txt
+++ b/common/CHANGES.txt
@@ -343,6 +343,9 @@ Trunk (unreleased changes)
     HADOOP-7437. IOUtils.copybytes will suppress the stream closure exceptions.
     (Uma Maheswara Rao G via szetszwo)
 
+    HADOOP-7090. Fix resource leaks in s3.INode, BloomMapFile, WritableUtils
+    and CBZip2OutputStream. (Uma Maheswara Rao G via szetszwo)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES
diff --git a/common/src/java/org/apache/hadoop/fs/s3/INode.java b/common/src/java/org/apache/hadoop/fs/s3/INode.java
index f2945737eed..5d08b7750b7 100644
--- a/common/src/java/org/apache/hadoop/fs/s3/INode.java
+++ b/common/src/java/org/apache/hadoop/fs/s3/INode.java
@@ -27,6 +27,7 @@ import java.io.InputStream;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.io.IOUtils;
 
 /**
  * Holds file metadata including type (regular file, or directory),
@@ -82,15 +83,20 @@ public class INode {
   public InputStream serialize() throws IOException {
     ByteArrayOutputStream bytes = new ByteArrayOutputStream();
     DataOutputStream out = new DataOutputStream(bytes);
-    out.writeByte(fileType.ordinal());
-    if (isFile()) {
-      out.writeInt(blocks.length);
-      for (int i = 0; i < blocks.length; i++) {
-        out.writeLong(blocks[i].getId());
-        out.writeLong(blocks[i].getLength());
+    try {
+      out.writeByte(fileType.ordinal());
+      if (isFile()) {
+        out.writeInt(blocks.length);
+        for (int i = 0; i < blocks.length; i++) {
+          out.writeLong(blocks[i].getId());
+          out.writeLong(blocks[i].getLength());
+        }
       }
+      out.close();
+      out = null;
+    } finally {
+      IOUtils.closeStream(out);
     }
-    out.close();
     return new ByteArrayInputStream(bytes.toByteArray());
   }
 
diff --git a/common/src/java/org/apache/hadoop/io/BloomMapFile.java b/common/src/java/org/apache/hadoop/io/BloomMapFile.java
index ab68ce7f60b..1b3857e2596 100644
--- a/common/src/java/org/apache/hadoop/io/BloomMapFile.java
+++ b/common/src/java/org/apache/hadoop/io/BloomMapFile.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.util.Options;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.bloom.DynamicBloomFilter;
 import org.apache.hadoop.util.bloom.Filter;
@@ -187,9 +186,14 @@ public class BloomMapFile {
     public synchronized void close() throws IOException {
       super.close();
       DataOutputStream out = fs.create(new Path(dir, BLOOM_FILE_NAME), true);
-      bloomFilter.write(out);
-      out.flush();
-      out.close();
+      try {
+        bloomFilter.write(out);
+        out.flush();
+        out.close();
+        out = null;
+      } finally {
+        IOUtils.closeStream(out);
+      }
     }
   }
 
@@ -225,15 +229,20 @@ public class BloomMapFile {
 
     private void initBloomFilter(Path dirName,
                                  Configuration conf) {
+
+      DataInputStream in = null;
       try {
         FileSystem fs = dirName.getFileSystem(conf);
-        DataInputStream in = fs.open(new Path(dirName, BLOOM_FILE_NAME));
+        in = fs.open(new Path(dirName, BLOOM_FILE_NAME));
         bloomFilter = new DynamicBloomFilter();
         bloomFilter.readFields(in);
         in.close();
+        in = null;
       } catch (IOException ioe) {
         LOG.warn("Can't open BloomFilter: " + ioe + " - fallback to MapFile.");
         bloomFilter = null;
+      } finally {
+        IOUtils.closeStream(in);
       }
     }
 
diff --git a/common/src/java/org/apache/hadoop/io/WritableUtils.java b/common/src/java/org/apache/hadoop/io/WritableUtils.java
index e0278b8aafc..db2dde781f6 100644
--- a/common/src/java/org/apache/hadoop/io/WritableUtils.java
+++ b/common/src/java/org/apache/hadoop/io/WritableUtils.java
@@ -62,8 +62,13 @@ public final class WritableUtils {
     if (bytes != null) {
       ByteArrayOutputStream bos = new ByteArrayOutputStream();
       GZIPOutputStream gzout = new GZIPOutputStream(bos);
-      gzout.write(bytes, 0, bytes.length);
-      gzout.close();
+      try {
+        gzout.write(bytes, 0, bytes.length);
+        gzout.close();
+        gzout = null;
+      } finally {
+        IOUtils.closeStream(gzout);
+      }
       byte[] buffer = bos.toByteArray();
       int len = buffer.length;
       out.writeInt(len);
diff --git a/common/src/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java b/common/src/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
index 392cf2c521d..3060eb924f4 100644
--- a/common/src/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
+++ b/common/src/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
@@ -27,6 +27,8 @@ package org.apache.hadoop.io.compress.bzip2;
 import java.io.OutputStream;
 import java.io.IOException;
 
+import org.apache.hadoop.io.IOUtils;
+
 /**
  * An output stream that compresses into the BZip2 format (without the file
  * header chars) into another stream.
@@ -727,8 +729,13 @@ public class CBZip2OutputStream extends OutputStream implements BZip2Constants {
   public void close() throws IOException {
     if (out != null) {
       OutputStream outShadow = this.out;
-      finish();
-      outShadow.close();
+      try {
+        finish();
+        outShadow.close();
+        outShadow = null;
+      } finally {
+        IOUtils.closeStream(outShadow);
+      }
     }
   }
 
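
Note (not part of the patch): every hunk above applies the same close-or-cleanup idiom. Do the work on the stream, call close() on the success path so that a failing close() still propagates, null out the reference, and let a finally block hand whatever is still open to IOUtils.closeStream(), which is null-safe and swallows the secondary IOException so it cannot mask the original error. The following is a minimal standalone sketch of that idiom; CloseStreamExample, writeRecord, and the file path parameter are illustrative names that do not appear in the patch.

// Illustrative sketch only -- not part of HADOOP-7090.
import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.IOUtils;

public class CloseStreamExample {

  /** Writes a length-prefixed byte array, releasing the stream on every path. */
  public static void writeRecord(String path, byte[] data) throws IOException {
    DataOutputStream out = new DataOutputStream(new FileOutputStream(path));
    try {
      out.writeInt(data.length);   // normal work on the stream
      out.write(data);
      out.close();                 // success path: a failing close() still propagates
      out = null;                  // mark the stream as already closed
    } finally {
      // Reached with a non-null 'out' only when an exception skipped the close()
      // above; IOUtils.closeStream() ignores null and suppresses secondary
      // IOExceptions, so the original exception is the one the caller sees.
      IOUtils.closeStream(out);
    }
  }
}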