diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index a650e768e6e..7d366ad491c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -652,6 +652,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9166. Move hftp / hsftp filesystem to hfds-client. (Mingliang Liu via
     wheat9)
 
+    HDFS-8696. Make the lower and higher watermark in the DN Netty server
+    configurable. (Xiaobing Zhou via wheat9)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index a9097177e10..7821e5a3bf0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -63,6 +63,12 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
       HdfsClientConfigKeys.DFS_HDFS_BLOCKS_METADATA_ENABLED_DEFAULT;
   public static final String DFS_WEBHDFS_ACL_PERMISSION_PATTERN_DEFAULT =
       HdfsClientConfigKeys.DFS_WEBHDFS_ACL_PERMISSION_PATTERN_DEFAULT;
+  public static final String DFS_WEBHDFS_NETTY_LOW_WATERMARK =
+      "dfs.webhdfs.netty.low.watermark";
+  public static final int DFS_WEBHDFS_NETTY_LOW_WATERMARK_DEFAULT = 32768;
+  public static final String DFS_WEBHDFS_NETTY_HIGH_WATERMARK =
+      "dfs.webhdfs.netty.high.watermark";
+  public static final int DFS_WEBHDFS_NETTY_HIGH_WATERMARK_DEFAULT = 65535;
   // HA related configuration
   public static final String DFS_DATANODE_RESTART_REPLICA_EXPIRY_KEY =
       "dfs.datanode.restart.replica.expiration";
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
index 0bcb1719b5f..85dd835c846 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
@@ -21,6 +21,7 @@ import io.netty.bootstrap.ServerBootstrap;
 import io.netty.channel.ChannelFactory;
 import io.netty.channel.ChannelFuture;
 import io.netty.channel.ChannelInitializer;
+import io.netty.channel.ChannelOption;
 import io.netty.channel.ChannelPipeline;
 import io.netty.channel.EventLoopGroup;
 import io.netty.channel.nio.NioEventLoopGroup;
@@ -30,10 +31,12 @@ import io.netty.handler.codec.http.HttpRequestDecoder;
 import io.netty.handler.codec.http.HttpResponseEncoder;
 import io.netty.handler.ssl.SslHandler;
 import io.netty.handler.stream.ChunkedWriteHandler;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.datanode.BlockScanner;
@@ -123,6 +126,18 @@ public class DatanodeHttpServer implements Closeable {
               conf, confForCreate));
         }
       });
+
+    this.httpServer.childOption(
+        ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK,
+        conf.getInt(
+            DFSConfigKeys.DFS_WEBHDFS_NETTY_HIGH_WATERMARK,
+            DFSConfigKeys.DFS_WEBHDFS_NETTY_HIGH_WATERMARK_DEFAULT));
+
+    this.httpServer.childOption(
+        ChannelOption.WRITE_BUFFER_LOW_WATER_MARK,
+        conf.getInt(
+            DFSConfigKeys.DFS_WEBHDFS_NETTY_LOW_WATERMARK,
+            DFSConfigKeys.DFS_WEBHDFS_NETTY_LOW_WATERMARK_DEFAULT));
+
     if (externalHttpChannel == null) {
       httpServer.channel(NioServerSocketChannel.class);
     } else {
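
Not part of the patch: a minimal standalone sketch of how the two new keys feed Netty's per-child write-buffer watermarks, mirroring the childOption calls the patch adds to DatanodeHttpServer. The class name WatermarkConfigExample and the overridden values (16384 / 65536) are illustrative assumptions; in a real deployment the keys would normally be set in hdfs-site.xml rather than in code.

import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelOption;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;

public class WatermarkConfigExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Illustrative overrides; normally these come from hdfs-site.xml.
    conf.setInt(DFSConfigKeys.DFS_WEBHDFS_NETTY_HIGH_WATERMARK, 65536);
    conf.setInt(DFSConfigKeys.DFS_WEBHDFS_NETTY_LOW_WATERMARK, 16384);

    ServerBootstrap bootstrap = new ServerBootstrap();
    // Same pattern as the patch: once pending write bytes on a child channel
    // exceed the high watermark, Channel#isWritable() returns false; it flips
    // back to true when the queue drains below the low watermark.
    bootstrap.childOption(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK,
        conf.getInt(DFSConfigKeys.DFS_WEBHDFS_NETTY_HIGH_WATERMARK,
            DFSConfigKeys.DFS_WEBHDFS_NETTY_HIGH_WATERMARK_DEFAULT));
    bootstrap.childOption(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK,
        conf.getInt(DFSConfigKeys.DFS_WEBHDFS_NETTY_LOW_WATERMARK,
            DFSConfigKeys.DFS_WEBHDFS_NETTY_LOW_WATERMARK_DEFAULT));
  }
}

Exposing both values lets operators tune how much WebHDFS response data the DataNode is willing to buffer per connection before Netty reports the channel as non-writable.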