From 85c4158074e53d60c8682d76eaf952afbe499d67 Mon Sep 17 00:00:00 2001
From: Eli Collins
Date: Thu, 5 Jan 2012 01:31:14 +0000
Subject: [PATCH] HADOOP-7949. Updated maxIdleTime default in the code to
 match core-default.xml. Contributed by Eli Collins

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1227423 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.txt                 |  3 +++
 .../hadoop/fs/CommonConfigurationKeys.java    |  2 +-
 .../fs/CommonConfigurationKeysPublic.java     |  2 +-
 .../java/org/apache/hadoop/ipc/Client.java    | 23 +++++++++++--------
 .../java/org/apache/hadoop/ipc/Server.java    | 23 ++++++++++++++-----
 .../org/apache/hadoop/ipc/TestSaslRPC.java    | 12 ++++++----
 6 files changed, 44 insertions(+), 21 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index ce878835466..39deb70e404 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -241,6 +241,9 @@ Release 0.23.1 - Unreleased
     HADOOP-7948. Shell scripts created by hadoop-dist/pom.xml to build tar do not
     properly propagate failure. (cim_michajlomatijkiw via tucu)
 
+    HADOOP-7949. Updated maxIdleTime default in the code to match
+    core-default.xml (eli)
+
 Release 0.23.0 - 2011-11-01
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
index 7c9b25c957b..cbd283cc195 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
@@ -51,7 +51,7 @@ public class CommonConfigurationKeys extends CommonConfigurationKeysPublic {
   /** How often does RPC client send pings to RPC server */
   public static final String IPC_PING_INTERVAL_KEY = "ipc.ping.interval";
   /** Default value for IPC_PING_INTERVAL_KEY */
-  public static final int IPC_PING_INTERVAL_DEFAULT = 60000;
+  public static final int IPC_PING_INTERVAL_DEFAULT = 60000; // 1 min
   /** Enables pings from RPC client to the server */
   public static final String IPC_CLIENT_PING_KEY = "ipc.client.ping";
   /** Default value of IPC_CLIENT_PING_KEY */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index 3ea4ed70c2f..401d07ab11c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -165,7 +165,7 @@ public class CommonConfigurationKeysPublic {
   public static final String IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY =
     "ipc.client.connection.maxidletime";
   /** Default value for IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY */
-  public static final int IPC_CLIENT_CONNECTION_MAXIDLETIME_DEFAULT = 10000;
+  public static final int IPC_CLIENT_CONNECTION_MAXIDLETIME_DEFAULT = 10000; // 10s
   /** See core-default.xml */
   public static final String IPC_CLIENT_CONNECT_MAX_RETRIES_KEY =
     "ipc.client.connect.max.retries";
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
index a6c2b472825..e8d39dcef73 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
@@ -48,6 +48,8 @@ import org.apache.commons.logging.*;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.ipc.RpcPayloadHeader.*;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
@@ -87,8 +89,6 @@ public class Client {
   private SocketFactory socketFactory;           // how to create sockets
   private int refCount = 1;
 
-  final static String PING_INTERVAL_NAME = "ipc.ping.interval";
-  final static int DEFAULT_PING_INTERVAL = 60000; // 1 min
   final static int PING_CALL_ID = -1;
 
   /**
@@ -98,7 +98,7 @@ public class Client {
    * @param pingInterval the ping interval
    */
   final public static void setPingInterval(Configuration conf, int pingInterval) {
-    conf.setInt(PING_INTERVAL_NAME, pingInterval);
+    conf.setInt(CommonConfigurationKeys.IPC_PING_INTERVAL_KEY, pingInterval);
   }
 
   /**
@@ -109,7 +109,8 @@ public class Client {
    * @return the ping interval
    */
   final static int getPingInterval(Configuration conf) {
-    return conf.getInt(PING_INTERVAL_NAME, DEFAULT_PING_INTERVAL);
+    return conf.getInt(CommonConfigurationKeys.IPC_PING_INTERVAL_KEY,
+        CommonConfigurationKeys.IPC_PING_INTERVAL_DEFAULT);
   }
 
   /**
@@ -122,7 +123,7 @@ public class Client {
    * @return the timeout period in milliseconds. -1 if no timeout value is set
    */
   final public static int getTimeout(Configuration conf) {
-    if (!conf.getBoolean("ipc.client.ping", true)) {
+    if (!conf.getBoolean(CommonConfigurationKeys.IPC_CLIENT_PING_KEY, true)) {
       return getPingInterval(conf);
     }
     return -1;
@@ -1338,12 +1339,16 @@ public class Client {
         Class<?> protocol, UserGroupInformation ticket, int rpcTimeout,
         Configuration conf) throws IOException {
       String remotePrincipal = getRemotePrincipal(conf, addr, protocol);
-      boolean doPing = conf.getBoolean("ipc.client.ping", true);
+      boolean doPing =
+        conf.getBoolean(CommonConfigurationKeys.IPC_CLIENT_PING_KEY, true);
       return new ConnectionId(addr, protocol, ticket,
           rpcTimeout, remotePrincipal,
-          conf.getInt("ipc.client.connection.maxidletime", 10000), // 10s
-          conf.getInt("ipc.client.connect.max.retries", 10),
-          conf.getBoolean("ipc.client.tcpnodelay", false),
+          conf.getInt(CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY,
+              CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_DEFAULT),
+          conf.getInt(CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY,
+              CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_DEFAULT),
+          conf.getBoolean(CommonConfigurationKeysPublic.IPC_CLIENT_TCPNODELAY_KEY,
+              CommonConfigurationKeysPublic.IPC_CLIENT_TCPNODELAY_DEFAULT),
           doPing,
           (doPing ? Client.getPingInterval(conf) : 0));
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index e40f688df47..9929b971feb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -62,6 +62,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Writable;
@@ -378,7 +379,9 @@ public abstract class Server {
                                          //-tion (for idle connections) ran
     private long cleanupInterval = 10000; //the minimum interval between
                                           //two cleanup runs
-    private int backlogLength = conf.getInt("ipc.server.listen.queue.size", 128);
+    private int backlogLength = conf.getInt(
+        CommonConfigurationKeysPublic.IPC_SERVER_LISTEN_QUEUE_SIZE_KEY,
+        CommonConfigurationKeysPublic.IPC_SERVER_LISTEN_QUEUE_SIZE_DEFAULT);
 
     public Listener() throws IOException {
       address = new InetSocketAddress(bindAddress, port);
@@ -1708,12 +1711,18 @@ public abstract class Server {
     } else {
       this.readThreads = conf.getInt(
           CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_KEY,
-          CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_DEFAULT); 
+          CommonConfigurationKeys.IPC_SERVER_RPC_READ_THREADS_DEFAULT);
     }
     this.callQueue = new LinkedBlockingQueue<Call>(maxQueueSize);
-    this.maxIdleTime = 2*conf.getInt("ipc.client.connection.maxidletime", 1000);
-    this.maxConnectionsToNuke = conf.getInt("ipc.client.kill.max", 10);
-    this.thresholdIdleConnections = conf.getInt("ipc.client.idlethreshold", 4000);
+    this.maxIdleTime = 2 * conf.getInt(
+        CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY,
+        CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_DEFAULT);
+    this.maxConnectionsToNuke = conf.getInt(
+        CommonConfigurationKeysPublic.IPC_CLIENT_KILL_MAX_KEY,
+        CommonConfigurationKeysPublic.IPC_CLIENT_KILL_MAX_DEFAULT);
+    this.thresholdIdleConnections = conf.getInt(
+        CommonConfigurationKeysPublic.IPC_CLIENT_IDLETHRESHOLD_KEY,
+        CommonConfigurationKeysPublic.IPC_CLIENT_IDLETHRESHOLD_DEFAULT);
     this.secretManager = (SecretManager<TokenIdentifier>) secretManager;
     this.authorize = 
       conf.getBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
@@ -1725,7 +1734,9 @@ public abstract class Server {
     this.port = listener.getAddress().getPort();
     this.rpcMetrics = RpcMetrics.create(this);
     this.rpcDetailedMetrics = RpcDetailedMetrics.create(this.port);
-    this.tcpNoDelay = conf.getBoolean("ipc.server.tcpnodelay", false);
+    this.tcpNoDelay = conf.getBoolean(
+        CommonConfigurationKeysPublic.IPC_SERVER_TCPNODELAY_KEY,
+        CommonConfigurationKeysPublic.IPC_SERVER_TCPNODELAY_DEFAULT);
 
     // Create the responder here
     responder = new Responder();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
index 0b186a1eb15..5748304c068 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
@@ -40,6 +40,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.net.NetUtils;
@@ -311,14 +312,17 @@ public class TestSaslRPC {
   public void testPingInterval() throws Exception {
     Configuration newConf = new Configuration(conf);
     newConf.set(SERVER_PRINCIPAL_KEY, SERVER_PRINCIPAL_1);
-    conf.setInt(Client.PING_INTERVAL_NAME, Client.DEFAULT_PING_INTERVAL);
+    conf.setInt(CommonConfigurationKeys.IPC_PING_INTERVAL_KEY,
+        CommonConfigurationKeys.IPC_PING_INTERVAL_DEFAULT);
+
     // set doPing to true
-    newConf.setBoolean("ipc.client.ping", true);
+    newConf.setBoolean(CommonConfigurationKeys.IPC_CLIENT_PING_KEY, true);
     ConnectionId remoteId = ConnectionId.getConnectionId(
         new InetSocketAddress(0), TestSaslProtocol.class, null, 0, newConf);
-    assertEquals(Client.DEFAULT_PING_INTERVAL, remoteId.getPingInterval());
+    assertEquals(CommonConfigurationKeys.IPC_PING_INTERVAL_DEFAULT,
+        remoteId.getPingInterval());
     // set doPing to false
-    newConf.setBoolean("ipc.client.ping", false);
+    newConf.setBoolean(CommonConfigurationKeys.IPC_CLIENT_PING_KEY, false);
     remoteId = ConnectionId.getConnectionId(
         new InetSocketAddress(0), TestSaslProtocol.class, null, 0, newConf);
     assertEquals(0, remoteId.getPingInterval());
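
Reviewer note: a minimal usage sketch, not part of the patch, showing how callers now read these IPC settings through the named constants introduced above instead of hard-coded strings and literal defaults. The class name IpcDefaultsExample is made up for illustration; Configuration, CommonConfigurationKeys, and CommonConfigurationKeysPublic are the real classes touched by this change.

    // Illustrative sketch only -- not part of this patch; class name is hypothetical.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

    public class IpcDefaultsExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Ping interval: falls back to IPC_PING_INTERVAL_DEFAULT (60000 ms) if unset.
        int pingInterval = conf.getInt(
            CommonConfigurationKeys.IPC_PING_INTERVAL_KEY,
            CommonConfigurationKeys.IPC_PING_INTERVAL_DEFAULT);
        // Connection max idle time: falls back to the 10000 ms default that this
        // patch aligns with core-default.xml.
        int maxIdleTime = conf.getInt(
            CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY,
            CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_DEFAULT);
        System.out.println("ipc.ping.interval=" + pingInterval
            + "ms, ipc.client.connection.maxidletime=" + maxIdleTime + "ms");
      }
    }

With an empty Configuration this prints the compiled-in defaults; for ipc.client.connection.maxidletime that is now 10000 ms, matching core-default.xml, which is the point of this change.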