From 1ba3ddbe6d8f617fd2fa4150e8a7be097dd83b4a Mon Sep 17 00:00:00 2001
From: Aaron Myers
Date: Tue, 12 Jul 2011 02:18:30 +0000
Subject: [PATCH] HDFS-2132. Potential resource leak in EditLogFileOutputStream.close. (atm)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1145428 13f79535-47bb-0310-9956-ffa450edef68
---
 hdfs/CHANGES.txt                                   |    2 +
 .../namenode/EditLogFileOutputStream.java          |   72 ++++++++++++-------
 .../namenode/TestEditLogFileOutputStream.java      |   27 +++++++
 3 files changed, 76 insertions(+), 25 deletions(-)

diff --git a/hdfs/CHANGES.txt b/hdfs/CHANGES.txt
index 6e98189135a..5d711c918fd 100644
--- a/hdfs/CHANGES.txt
+++ b/hdfs/CHANGES.txt
@@ -818,6 +818,8 @@ Trunk (unreleased changes)
     reading only from a currently being written block. (John George
     via szetszwo)
 
+    HDFS-2132. Potential resource leak in EditLogFileOutputStream.close. (atm)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES
diff --git a/hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileOutputStream.java b/hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileOutputStream.java
index bf196a23692..80114af29d1 100644
--- a/hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileOutputStream.java
+++ b/hdfs/src/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileOutputStream.java
@@ -28,8 +28,11 @@ import java.util.zip.Checksum;
 
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Writable;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * An implementation of the abstract class {@link EditLogOutputStream}, which
  * stores edits in a local file.
@@ -120,32 +123,41 @@ class EditLogFileOutputStream extends EditLogOutputStream {
 
   @Override
   public void close() throws IOException {
-    // close should have been called after all pending transactions
-    // have been flushed & synced.
-    // if already closed, just skip
-    if(bufCurrent != null)
-    {
-      int bufSize = bufCurrent.size();
-      if (bufSize != 0) {
-        throw new IOException("FSEditStream has " + bufSize
-            + " bytes still to be flushed and cannot " + "be closed.");
+    try {
+      // close should have been called after all pending transactions
+      // have been flushed & synced.
+      // if already closed, just skip
+      if(bufCurrent != null)
+      {
+        int bufSize = bufCurrent.size();
+        if (bufSize != 0) {
+          throw new IOException("FSEditStream has " + bufSize
+              + " bytes still to be flushed and cannot " + "be closed.");
+        }
+        bufCurrent.close();
+        bufCurrent = null;
      }
-      bufCurrent.close();
-      bufCurrent = null;
-    }
-
-    if(bufReady != null) {
-      bufReady.close();
-      bufReady = null;
-    }
-
-    // remove the last INVALID marker from transaction log.
-    if (fc != null && fc.isOpen()) {
-      fc.truncate(fc.position());
-      fc.close();
-    }
-    if (fp != null) {
-      fp.close();
+
+      if(bufReady != null) {
+        bufReady.close();
+        bufReady = null;
+      }
+
+      // remove the last INVALID marker from transaction log.
+      if (fc != null && fc.isOpen()) {
+        fc.truncate(fc.position());
+        fc.close();
+        fc = null;
+      }
+      if (fp != null) {
+        fp.close();
+        fp = null;
+      }
+    } finally {
+      IOUtils.cleanup(FSNamesystem.LOG, bufCurrent, bufReady, fc, fp);
+      bufCurrent = bufReady = null;
+      fc = null;
+      fp = null;
     }
   }
 
@@ -225,4 +237,14 @@ class EditLogFileOutputStream extends EditLogOutputStream {
   File getFile() {
     return file;
   }
+
+  @VisibleForTesting
+  public void setFileChannelForTesting(FileChannel fc) {
+    this.fc = fc;
+  }
+
+  @VisibleForTesting
+  public FileChannel getFileChannelForTesting() {
+    return fc;
+  }
 }
diff --git a/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestEditLogFileOutputStream.java b/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestEditLogFileOutputStream.java
index 59b57911602..84a7392afe1 100644
--- a/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestEditLogFileOutputStream.java
+++ b/hdfs/src/test/hdfs/org/apache/hadoop/hdfs/server/namenode/TestEditLogFileOutputStream.java
@@ -20,9 +20,11 @@ package org.apache.hadoop.hdfs.server.namenode;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.channels.FileChannel;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.DU;
@@ -31,6 +33,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.Test;
+import org.mockito.Mockito;
 
 public class TestEditLogFileOutputStream {
 
@@ -58,5 +61,29 @@ public class TestEditLogFileOutputStream {
     assertTrue("Edit log disk space used should be at least 257 blocks",
         257 * 4096 <= new DU(editLog, conf).getUsed());
   }
+
+  @Test
+  public void testClose() throws IOException {
+    String errorMessage = "TESTING: fc.truncate() threw IOE";
+
+    File testDir = new File(System.getProperty("test.build.data", "/tmp"));
+    assertTrue("could not create test directory", testDir.exists() || testDir.mkdirs());
+    File f = new File(testDir, "edits");
+    assertTrue("could not create test file", f.createNewFile());
+    EditLogFileOutputStream elos = new EditLogFileOutputStream(f, 0);
+
+    FileChannel mockFc = Mockito.spy(elos.getFileChannelForTesting());
+    Mockito.doThrow(new IOException(errorMessage)).when(mockFc).truncate(Mockito.anyLong());
+    elos.setFileChannelForTesting(mockFc);
+
+    try {
+      elos.close();
+      fail("elos.close() succeeded, but should have thrown");
+    } catch (IOException e) {
+      assertEquals("wrong IOE thrown from elos.close()", e.getMessage(), errorMessage);
+    }
+
+    assertEquals("fc was not nulled when elos.close() failed", elos.getFileChannelForTesting(), null);
+  }
 }
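
Note (not part of the patch): the sketch below distills the resource-handling pattern the new close() adopts, for readers who want it outside the HDFS context. The class name LogSink and its fields are hypothetical; only the try/finally shape and IOUtils.cleanup(Log, Closeable...) are taken from the change above. Even if truncate() or one of the close() calls throws, the finally block closes whatever is still open and logs any secondary failure, so the file descriptor cannot leak.

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.nio.channels.FileChannel;

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.apache.hadoop.io.IOUtils;

    /** Hypothetical example (not part of the patch): close resources without leaking. */
    class LogSink {
      private static final Log LOG = LogFactory.getLog(LogSink.class);

      private FileOutputStream fp;
      private FileChannel fc;

      LogSink(File f) throws IOException {
        fp = new FileOutputStream(f, true); // open for append
        fc = fp.getChannel();
      }

      /** Closes the sink; fp and fc are released even if truncate() throws. */
      public void close() throws IOException {
        try {
          if (fc != null && fc.isOpen()) {
            fc.truncate(fc.position()); // may throw, e.g. on a failing volume
            fc.close();
            fc = null;
          }
          if (fp != null) {
            fp.close();
            fp = null;
          }
        } finally {
          // Closes anything still non-null; IOExceptions here are logged, not rethrown.
          IOUtils.cleanup(LOG, fc, fp);
          fc = null;
          fp = null;
        }
      }
    }

A test can force the failure path the same way the new testClose() above does: wrap the channel in a Mockito spy, stub truncate() to throw, and assert that close() both propagates the IOException and nulls out the channel.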