diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index a3664285a55..c3ea7a122df 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1425,6 +1425,9 @@ Release 2.8.0 - UNRELEASED
     initialization, because HftpFileSystem is missing. (Mingliang Liu via
     cnauroth)
 
+    HDFS-9249. NPE is thrown if an IOException is thrown in NameNode constructor.
+    (Wei-Chiu Chuang via Yongjun Zhang)
+
 Release 2.7.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
index 5da1f01384a..3933c660143 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
@@ -209,7 +209,9 @@ public class BackupNode extends NameNode {
 
     // Abort current log segment - otherwise the NN shutdown code
     // will close it gracefully, which is incorrect.
-    getFSImage().getEditLog().abortCurrentLogSegment();
+    if (namesystem != null) {
+      getFSImage().getEditLog().abortCurrentLogSegment();
+    }
 
     // Stop name-node threads
     super.stop();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
index 6102bdc3d6b..5b86eeafe20 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
@@ -891,15 +891,24 @@ public class NameNode implements NameNodeStatusMXBean {
         haContext.writeUnlock();
       }
     } catch (IOException e) {
-      this.stop();
+      this.stopAtException(e);
       throw e;
     } catch (HadoopIllegalArgumentException e) {
-      this.stop();
+      this.stopAtException(e);
       throw e;
     }
     this.started.set(true);
   }
 
+  private void stopAtException(Exception e){
+    try {
+      this.stop();
+    } catch (Exception ex) {
+      LOG.warn("Encountered exception when handling exception ("
+          + e.getMessage() + "):", ex);
+    }
+  }
+
   protected HAState createHAState(StartupOption startOpt) {
     if (!haEnabled || startOpt == StartupOption.UPGRADE
         || startOpt == StartupOption.UPGRADEONLY) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestBackupNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestBackupNode.java
index bdc2b28b36b..9f31857c8b2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestBackupNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestBackupNode.java
@@ -21,6 +21,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.File;
 import java.io.IOException;
@@ -30,7 +31,9 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.directory.api.ldap.aci.UserClass;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
@@ -46,6 +49,8 @@ import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
 import org.apache.hadoop.hdfs.server.namenode.FileJournalManager.EditLogFile;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Before;
@@ -124,6 +129,72 @@ public class TestBackupNode {
         Collections.singletonList((int)thisCheckpointTxId));
   }
 
+
+  /**
+   * Regression test for HDFS-9249.
+   * This test configures the primary name node with SIMPLE authentication,
+   * and configures the backup node with Kerberos authentication with
+   * invalid keytab settings.
+   *
+   * This configuration causes the backup node to throw an NPE trying to abort
+   * the edit log.
+   */
+  @Test
+  public void startBackupNodeWithIncorrectAuthentication() throws IOException {
+    Configuration c = new HdfsConfiguration();
+    StartupOption startupOpt = StartupOption.CHECKPOINT;
+    String dirs = getBackupNodeDir(startupOpt, 1);
+    c.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://127.0.0.1:1234");
+    c.set(DFSConfigKeys.DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY, "localhost:0");
+    c.set(DFSConfigKeys.DFS_BLOCKREPORT_INITIAL_DELAY_KEY, "0");
+    c.setInt(DFSConfigKeys.DFS_DATANODE_SCAN_PERIOD_HOURS_KEY,
+        -1); // disable block scanner
+    c.setInt(DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_TXNS_KEY, 1);
+    c.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, dirs);
+    c.set(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
+        "${" + DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY + "}");
+    c.set(DFSConfigKeys.DFS_NAMENODE_BACKUP_ADDRESS_KEY,
+        "127.0.0.1:0");
+    c.set(DFSConfigKeys.DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY,
+        "127.0.0.1:0");
+
+    NameNode nn;
+    try {
+      Configuration nnconf = new HdfsConfiguration(c);
+      DFSTestUtil.formatNameNode(nnconf);
+      nn = NameNode.createNameNode(new String[] {}, nnconf);
+    } catch (IOException e) {
+      LOG.info("IOException is thrown creating name node");
+      throw e;
+    }
+
+    c.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
+        "kerberos");
+    c.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, "");
+
+    BackupNode bn = null;
+    try {
+      bn = (BackupNode)NameNode.createNameNode(
+          new String[] {startupOpt.getName()}, c);
+      assertTrue("Namesystem in BackupNode should be null",
+          bn.getNamesystem() == null);
+      fail("Incorrect authentication setting should throw IOException");
+    } catch (IOException e) {
+      LOG.info("IOException thrown as expected", e);
+    } finally {
+      if (nn != null) {
+        nn.stop();
+      }
+      if (bn != null) {
+        bn.stop();
+      }
+      SecurityUtil.setAuthenticationMethod(
+          UserGroupInformation.AuthenticationMethod.SIMPLE, c);
+      // reset security authentication
+      UserGroupInformation.setConfiguration(c);
+    }
+  }
+
   @Test
   public void testCheckpointNode() throws Exception {
     testCheckpoint(StartupOption.CHECKPOINT);