HADOOP-9485. No default value in the code for hadoop.rpc.socket.factory.class.default. Contributed by Colin Patrick McCabe.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1481208 13f79535-47bb-0310-9956-ffa450edef68
Committed by Aaron Myers, 2013-05-10 21:49:58 +00:00
commit f8b466a1f8 (parent bd816a2739)
6 changed files with 50 additions and 9 deletions
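The gist of the change: NetUtils.getDefaultSocketFactory() used to look up hadoop.rpc.socket.factory.class.default with no in-code fallback, so a Configuration built without loading the XML defaults resolved the key to null and silently fell back to the JDK's default SocketFactory. The patch introduces the HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_DEFAULT constant and passes it as the fallback argument to conf.get(). A small illustrative snippet, not part of the commit, exercising the code path being fixed:

import javax.net.SocketFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetUtils;

public class SocketFactoryDefaultCheck {
  public static void main(String[] args) {
    // Build a Configuration that skips core-default.xml, i.e. the situation
    // in which the key previously had no value at all.
    Configuration conf = new Configuration(false);
    SocketFactory factory = NetUtils.getDefaultSocketFactory(conf);
    // With the in-code default added by this patch, this prints
    // org.apache.hadoop.net.StandardSocketFactory rather than the JDK's
    // default SocketFactory implementation.
    System.out.println(factory.getClass().getName());
  }
}

Note that an explicitly empty value for the key (as the TestSaslRPC hunk below sets) still falls through to SocketFactory.getDefault(); only a completely absent key picks up the new in-code default.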

CHANGES.txt

@@ -152,6 +152,9 @@ Release 2.0.5-beta - UNRELEASED
     HADOOP-9549. WebHdfsFileSystem hangs on close(). (daryn via kihwal)
 
+    HADOOP-9485. No default value in the code for
+    hadoop.rpc.socket.factory.class.default. (Colin Patrick McCabe via atm)
+
 Release 2.0.4-alpha - 2013-04-25
 
   INCOMPATIBLE CHANGES

CommonConfigurationKeysPublic.java

@@ -213,6 +213,8 @@ public class CommonConfigurationKeysPublic {
   /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
   public static final String HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY =
     "hadoop.rpc.socket.factory.class.default";
+  public static final String HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_DEFAULT =
+    "org.apache.hadoop.net.StandardSocketFactory";
   /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
   public static final String HADOOP_SOCKS_SERVER_KEY = "hadoop.socks.server";
   /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */

NetUtils.java

@@ -50,6 +50,7 @@ import org.apache.commons.net.util.SubnetUtils.SubnetInfo;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.ipc.VersionedProtocol;
 import org.apache.hadoop.security.SecurityUtil;

@@ -112,7 +113,9 @@ public class NetUtils {
    */
   public static SocketFactory getDefaultSocketFactory(Configuration conf) {
-    String propValue = conf.get("hadoop.rpc.socket.factory.class.default");
+    String propValue = conf.get(
+        CommonConfigurationKeysPublic.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY,
+        CommonConfigurationKeysPublic.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_DEFAULT);
     if ((propValue == null) || (propValue.length() == 0))
       return SocketFactory.getDefault();

TestSaslRPC.java

@@ -41,6 +41,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.net.NetUtils;

@@ -378,7 +379,8 @@ public class TestSaslRPC {
     current.addToken(token);
     Configuration newConf = new Configuration(conf);
-    newConf.set("hadoop.rpc.socket.factory.class.default", "");
+    newConf.set(CommonConfigurationKeysPublic.
+        HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY, "");
     newConf.set(SERVER_PRINCIPAL_KEY, SERVER_PRINCIPAL_1);
     TestSaslProtocol proxy1 = null;

TestSocketFactory.java

@@ -27,6 +27,7 @@ import javax.net.SocketFactory;
 import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.StandardSocketFactory;
 import org.junit.Test;

@@ -39,13 +40,13 @@ public class TestSocketFactory {
     int toBeCached1 = 1;
     int toBeCached2 = 2;
     Configuration conf = new Configuration();
-    conf.set("hadoop.rpc.socket.factory.class.default",
+    conf.set(CommonConfigurationKeys.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY,
         "org.apache.hadoop.ipc.TestSocketFactory$DummySocketFactory");
     final SocketFactory dummySocketFactory = NetUtils
         .getDefaultSocketFactory(conf);
     dummyCache.put(dummySocketFactory, toBeCached1);
-    conf.set("hadoop.rpc.socket.factory.class.default",
+    conf.set(CommonConfigurationKeys.HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY,
         "org.apache.hadoop.net.StandardSocketFactory");
     final SocketFactory defaultSocketFactory = NetUtils
         .getDefaultSocketFactory(conf);

TestDistributedFileSystem.java

@@ -64,12 +64,21 @@ public class TestDistributedFileSystem {
   private boolean dualPortTesting = false;
 
+  private boolean noXmlDefaults = false;
+
   private HdfsConfiguration getTestConfiguration() {
-    HdfsConfiguration conf = new HdfsConfiguration();
+    HdfsConfiguration conf;
+    if (noXmlDefaults) {
+      conf = new HdfsConfiguration(false);
+    } else {
+      conf = new HdfsConfiguration();
+    }
+
     if (dualPortTesting) {
       conf.set(DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
           "localhost:0");
     }
+    conf.setLong(DFSConfigKeys.DFS_NAMENODE_MIN_BLOCK_SIZE_KEY, 0);
     return conf;
   }
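For context on the hunks above and below: HdfsConfiguration(false) passes loadDefaults=false down to Configuration, so none of the *-default.xml or *-site.xml resources are read and only values hard-coded in the source (such as the new socket-factory default) apply. A hypothetical snippet, not part of the commit, contrasting the two modes getTestConfiguration() can now produce:

import org.apache.hadoop.hdfs.HdfsConfiguration;

public class ConfModes {
  public static void main(String[] args) {
    // Normal mode: core-default.xml, hdfs-default.xml and any site files are loaded.
    HdfsConfiguration withXml = new HdfsConfiguration();
    // noXmlDefaults mode: no XML resources at all; the test suite re-runs
    // against this configuration to prove the in-code defaults are sufficient.
    HdfsConfiguration codeOnly = new HdfsConfiguration(false);
    System.out.println(withXml.size() + " keys vs " + codeOnly.size() + " keys");
  }
}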
@@ -585,11 +594,32 @@ public class TestDistributedFileSystem {
   public void testAllWithDualPort() throws Exception {
     dualPortTesting = true;
 
-    testFileSystemCloseAll();
-    testDFSClose();
-    testDFSClient();
-    testFileChecksum();
+    try {
+      testFileSystemCloseAll();
+      testDFSClose();
+      testDFSClient();
+      testFileChecksum();
+    } finally {
+      dualPortTesting = false;
+    }
   }
 
+  @Test
+  public void testAllWithNoXmlDefaults() throws Exception {
+    // Do all the tests with a configuration that ignores the defaults in
+    // the XML files.
+    noXmlDefaults = true;
+
+    try {
+      testFileSystemCloseAll();
+      testDFSClose();
+      testDFSClient();
+      testFileChecksum();
+    } finally {
+      noXmlDefaults = false;
+    }
+  }
+
   /**
    * Tests the normal path of batching up BlockLocation[]s to be passed to a