From 945cb2ecaa110e7815ebdefd50dc8c8353f30cb3 Mon Sep 17 00:00:00 2001
From: Aaron Myers
Date: Fri, 10 May 2013 21:45:32 +0000
Subject: [PATCH] HADOOP-9485. No default value in the code for
 hadoop.rpc.socket.factory.class.default. Contributed by Colin Patrick McCabe.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1481200 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.txt                 |  3 ++
 .../fs/CommonConfigurationKeysPublic.java     |  2 +
 .../java/org/apache/hadoop/net/NetUtils.java  |  5 ++-
 .../org/apache/hadoop/ipc/TestSaslRPC.java    |  4 +-
 .../apache/hadoop/ipc/TestSocketFactory.java  |  5 ++-
 .../hdfs/TestDistributedFileSystem.java       | 40 ++++++++++++++++---
 6 files changed, 50 insertions(+), 9 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 5212d23939f..7593a5b6b3c 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -707,6 +707,9 @@ Release 2.0.5-beta - UNRELEASED
 
     HADOOP-9549. WebHdfsFileSystem hangs on close(). (daryn via kihwal)
 
+    HADOOP-9485. No default value in the code for
+    hadoop.rpc.socket.factory.class.default. (Colin Patrick McCabe via atm)
+
 Release 2.0.4-alpha - 2013-04-25
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index 3a236cbc278..c4927e52a83 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -215,6 +215,8 @@ public class CommonConfigurationKeysPublic {
   /** See core-default.xml */
   public static final String HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY =
     "hadoop.rpc.socket.factory.class.default";
+  public static final String HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_DEFAULT =
+    "org.apache.hadoop.net.StandardSocketFactory";
   /** See core-default.xml */
   public static final String HADOOP_SOCKS_SERVER_KEY = "hadoop.socks.server";
   /** See core-default.xml */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
index 7ff9030f94a..8595f87b9e8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
@@ -50,6 +50,7 @@ import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.ipc.VersionedProtocol;
 import org.apache.hadoop.security.SecurityUtil;
@@ -112,7 +113,9 @@ public class NetUtils {
    */
   public static SocketFactory getDefaultSocketFactory(Configuration conf) {
 
-    String propValue = conf.get("hadoop.rpc.socket.factory.class.default");
+    String propValue = conf.get(
+        CommonConfigurationKeysPublic.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY,
+        CommonConfigurationKeysPublic.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_DEFAULT);
     if ((propValue == null) || (propValue.length() == 0))
       return SocketFactory.getDefault();
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
index 158e48a9376..422869c4003 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
@@ -41,6 +41,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.net.NetUtils;
@@ -378,7 +379,8 @@ public class TestSaslRPC {
     current.addToken(token);
 
     Configuration newConf = new Configuration(conf);
-    newConf.set("hadoop.rpc.socket.factory.class.default", "");
+    newConf.set(CommonConfigurationKeysPublic.
+        HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY, "");
     newConf.set(SERVER_PRINCIPAL_KEY, SERVER_PRINCIPAL_1);
 
     TestSaslProtocol proxy1 = null;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
index 3298f1e605c..c29d240ae35 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
@@ -27,6 +27,7 @@ import javax.net.SocketFactory;
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.StandardSocketFactory;
 import org.junit.Test;
@@ -39,13 +40,13 @@ public class TestSocketFactory {
     int toBeCached1 = 1;
     int toBeCached2 = 2;
     Configuration conf = new Configuration();
-    conf.set("hadoop.rpc.socket.factory.class.default",
+    conf.set(CommonConfigurationKeys.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY,
       "org.apache.hadoop.ipc.TestSocketFactory$DummySocketFactory");
     final SocketFactory dummySocketFactory = NetUtils
       .getDefaultSocketFactory(conf);
     dummyCache.put(dummySocketFactory, toBeCached1);
 
-    conf.set("hadoop.rpc.socket.factory.class.default",
+    conf.set(CommonConfigurationKeys.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY,
       "org.apache.hadoop.net.StandardSocketFactory");
     final SocketFactory defaultSocketFactory = NetUtils
       .getDefaultSocketFactory(conf);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
index b5f88194153..9e2fd277b47 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
@@ -67,12 +67,21 @@ public class TestDistributedFileSystem {
 
   private boolean dualPortTesting = false;
 
+  private boolean noXmlDefaults = false;
+
   private HdfsConfiguration getTestConfiguration() {
-    HdfsConfiguration conf = new HdfsConfiguration();
+    HdfsConfiguration conf;
+    if (noXmlDefaults) {
+      conf = new HdfsConfiguration(false);
+    } else {
+      conf = new HdfsConfiguration();
+    }
     if (dualPortTesting) {
       conf.set(DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
           "localhost:0");
     }
+    conf.setLong(DFSConfigKeys.DFS_NAMENODE_MIN_BLOCK_SIZE_KEY, 0);
+
     return conf;
   }
 
@@ -612,11 +621,32 @@ public class TestDistributedFileSystem {
   public void testAllWithDualPort() throws Exception {
     dualPortTesting = true;
 
-    testFileSystemCloseAll();
-    testDFSClose();
-    testDFSClient();
-    testFileChecksum();
+    try {
+      testFileSystemCloseAll();
+      testDFSClose();
+      testDFSClient();
+      testFileChecksum();
+    } finally {
+      dualPortTesting = false;
+    }
   }
+
+  @Test
+  public void testAllWithNoXmlDefaults() throws Exception {
+    // Do all the tests with a configuration that ignores the defaults in
+    // the XML files.
+    noXmlDefaults = true;
+
+    try {
+      testFileSystemCloseAll();
+      testDFSClose();
+      testDFSClient();
+      testFileChecksum();
+    } finally {
+      noXmlDefaults = false;
+    }
+  }
+
 
   /**
    * Tests the normal path of batching up BlockLocation[]s to be passed to a
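
Usage note (illustration only, not part of the patch): a minimal sketch of the behavior this change is meant to guarantee, assuming a bare Configuration created with loadDefaults=false so that core-default.xml is never consulted; the class name SocketFactoryDefaultCheck is hypothetical.

    import javax.net.SocketFactory;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.net.NetUtils;
    import org.apache.hadoop.net.StandardSocketFactory;

    public class SocketFactoryDefaultCheck {
      public static void main(String[] args) {
        // 'false' skips loading core-default.xml/core-site.xml, so
        // hadoop.rpc.socket.factory.class.default is unset in this Configuration.
        Configuration conf = new Configuration(false);
        SocketFactory factory = NetUtils.getDefaultSocketFactory(conf);
        // With the in-code default added by this patch, the returned factory
        // should be Hadoop's StandardSocketFactory rather than the bare JDK
        // SocketFactory.getDefault().
        System.out.println(factory instanceof StandardSocketFactory);
      }
    }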