Put thread-pool settings into all site.xmls and check the system property is set

stack 2020-03-04 08:40:26 -08:00
parent c3edceb6ae
commit c8c2a87505
7 changed files with 52 additions and 10 deletions

View File

@@ -22,7 +22,6 @@ import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioSocketChannel;
import org.apache.hbase.thirdparty.io.netty.util.concurrent.DefaultThreadFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.util.Pair;
@@ -31,10 +30,16 @@ import org.apache.hadoop.hbase.util.Pair;
*/
@InterfaceAudience.Private
class DefaultNettyEventLoopConfig {
/**
* Name of the system property to set to change the default netty eventloop pool size.
* Default is 0, which lets netty pick its own thread count.
*/
public static final String HBASE_NETTY_EVENTLOOP_DEFAULT_POOL_KEY =
"hbase.netty.eventloop.default.pool";
public static final Pair<EventLoopGroup, Class<? extends Channel>> GROUP_AND_CHANNEL_CLASS = Pair
.<EventLoopGroup, Class<? extends Channel>> newPair(
new NioEventLoopGroup(0,
new DefaultThreadFactory("Default-IPC-NioEventLoopGroup", true, Thread.MAX_PRIORITY)),
NioSocketChannel.class);
new NioEventLoopGroup(Integer.getInteger(HBASE_NETTY_EVENTLOOP_DEFAULT_POOL_KEY, 0),
new DefaultThreadFactory("Default-IPC-NioEventLoopGroup", true,
Thread.NORM_PRIORITY)), NioSocketChannel.class);
}
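
For reference, a minimal self-contained sketch of the mechanism this hunk introduces: Integer.getInteger reads the new hbase.netty.eventloop.default.pool JVM system property (falling back to 0 when it is unset) and that value sizes the shared NioEventLoopGroup. The class name below is hypothetical and the sketch assumes the hbase-thirdparty shaded netty artifact is on the classpath; it is not part of the patch.

// Illustrative only; not patch code.
import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.util.concurrent.DefaultThreadFactory;

public class EventLoopPoolSizeSketch {
  public static void main(String[] args) {
    // Same lookup as the patch: -Dhbase.netty.eventloop.default.pool=N, else 0.
    int poolSize = Integer.getInteger("hbase.netty.eventloop.default.pool", 0);
    // Daemon threads at normal priority, mirroring the new DefaultThreadFactory arguments.
    NioEventLoopGroup group = new NioEventLoopGroup(poolSize,
        new DefaultThreadFactory("Default-IPC-NioEventLoopGroup", true, Thread.NORM_PRIORITY));
    System.out.println("configured pool size = " + poolSize);
    group.shutdownGracefully();
  }
}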

View File

@@ -58,8 +58,11 @@ public class NettyRpcClient extends AbstractRpcClient<NettyRpcConnection> {
.getEventLoopConfig(conf);
if (groupAndChannelClass == null) {
// Use our own EventLoopGroup.
this.group = new NioEventLoopGroup(0,
new DefaultThreadFactory("IPC-NioEventLoopGroup", true, Thread.MAX_PRIORITY));
int count =
Integer.getInteger(DefaultNettyEventLoopConfig.HBASE_NETTY_EVENTLOOP_DEFAULT_POOL_KEY, 0);
System.out.println("COUNT="+ count);
this.group = new NioEventLoopGroup(count,
new DefaultThreadFactory("IPC-NioEventLoopGroup", true, Thread.NORM_PRIORITY));
this.channelClass = NioSocketChannel.class;
this.shutdownGroupWhenClose = true;
} else {
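
The commit title talks about checking that the system property is set, and the <systemPropertyVariables> hunk at the end of this commit (apparently maven-surefire configuration) sets hbase.netty.eventloop.default.pool to 5 for test JVMs. A hypothetical JUnit 4 check of that wiring could look like the sketch below; the test class is invented for illustration and assumes JUnit 4 on the classpath.

import static org.junit.Assert.assertEquals;
import org.junit.Test;

public class TestEventLoopPoolProperty {
  @Test
  public void poolSizePropertyIsPickedUp() {
    // Either surefire's <systemPropertyVariables> or an explicit setProperty() call
    // must run before the event loop group is created for the value to take effect.
    System.setProperty("hbase.netty.eventloop.default.pool", "5");
    assertEquals(5, (int) Integer.getInteger("hbase.netty.eventloop.default.pool", 0));
  }
}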

View File

@@ -229,6 +229,7 @@ public class HttpProxyExample {
channelGroup = new DefaultChannelGroup(GlobalEventExecutor.INSTANCE);
serverChannel = new ServerBootstrap().group(bossGroup, workerGroup)
.channel(NioServerSocketChannel.class).childOption(ChannelOption.TCP_NODELAY, true)
.childOption(ChannelOption.SO_REUSEADDR, true)
.childHandler(new ChannelInitializer<Channel>() {
@Override
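
This hunk adds SO_REUSEADDR as a childOption, i.e. on accepted connections; the same option is also commonly set via option() on the listening socket so a restarted server can rebind a port still in TIME_WAIT. A self-contained sketch showing both placements follows; the class is hypothetical, shaded netty is assumed on the classpath, and this is not the HttpProxyExample code.

import org.apache.hbase.thirdparty.io.netty.bootstrap.ServerBootstrap;
import org.apache.hbase.thirdparty.io.netty.channel.Channel;
import org.apache.hbase.thirdparty.io.netty.channel.ChannelInitializer;
import org.apache.hbase.thirdparty.io.netty.channel.ChannelOption;
import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioServerSocketChannel;

public class ReuseAddrSketch {
  public static void main(String[] args) throws InterruptedException {
    NioEventLoopGroup group = new NioEventLoopGroup();
    try {
      Channel server = new ServerBootstrap().group(group)
          .channel(NioServerSocketChannel.class)
          .option(ChannelOption.SO_REUSEADDR, true)        // listening socket
          .childOption(ChannelOption.TCP_NODELAY, true)    // accepted sockets
          .childOption(ChannelOption.SO_REUSEADDR, true)
          .childHandler(new ChannelInitializer<Channel>() {
            @Override
            protected void initChannel(Channel ch) {
              // no handlers needed for this sketch
            }
          })
          .bind(0).sync().channel();                       // bind an ephemeral port
      server.close().sync();
    } finally {
      group.shutdownGracefully();
    }
  }
}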

View File

@@ -84,13 +84,15 @@ public class NettyRpcServer extends RpcServer {
eventLoopGroup = config.group();
channelClass = config.serverChannelClass();
} else {
eventLoopGroup = new NioEventLoopGroup(0,
eventLoopGroup = new NioEventLoopGroup(
Integer.getInteger(DefaultNettyEventLoopConfig.HBASE_NETTY_EVENTLOOP_DEFAULT_POOL_KEY, 0),
new DefaultThreadFactory("NettyRpcServer", true, Thread.MAX_PRIORITY));
channelClass = NioServerSocketChannel.class;
}
ServerBootstrap bootstrap = new ServerBootstrap().group(eventLoopGroup).channel(channelClass)
.childOption(ChannelOption.TCP_NODELAY, tcpNoDelay)
.childOption(ChannelOption.SO_KEEPALIVE, tcpKeepAlive)
.childOption(ChannelOption.SO_REUSEADDR, true)
.childHandler(new ChannelInitializer<Channel>() {
@Override
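
Before this change the server passed 0 to NioEventLoopGroup, which lets netty size the pool itself (usually twice the number of available processors); the new code pins the count to whatever hbase.netty.eventloop.default.pool resolves to, falling back to that netty default when the property is absent. A small sketch of the difference (hypothetical class, not patch code, shaded netty assumed):

import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;

public class WorkerCountSketch {
  public static void main(String[] args) {
    NioEventLoopGroup nettyDefault = new NioEventLoopGroup(0);  // 0 = let netty decide
    NioEventLoopGroup pinned = new NioEventLoopGroup(5);        // explicit worker count
    // executorCount() reports the actual number of event loops created.
    System.out.println("default = " + nettyDefault.executorCount());  // usually 2 x cores
    System.out.println("pinned  = " + pinned.executorCount());        // 5
    nettyDefault.shutdownGracefully();
    pinned.shutdownGracefully();
  }
}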

View File

@@ -27,9 +27,7 @@ import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioServerSocketChannel;
import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioSocketChannel;
import org.apache.hbase.thirdparty.io.netty.util.concurrent.DefaultThreadFactory;
import java.util.concurrent.ThreadFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience;
@@ -38,7 +36,6 @@ import org.apache.yetus.audience.InterfaceAudience;
*/
@InterfaceAudience.Private
public class NettyEventLoopGroupConfig {
private final EventLoopGroup group;
private final Class<? extends ServerChannel> serverChannelClass;
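
Only the imports and field declarations of NettyEventLoopGroupConfig are visible in this hunk; the point of such a config object is to choose a transport once and pair the EventLoopGroup with the matching channel classes, which is what NettyRpcServer does above via config.group() and config.serverChannelClass(). A hedged sketch of that idea follows; it is not the actual HBase implementation, the class name is invented, and it assumes the shaded artifact also bundles netty's epoll transport classes.

import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.ServerChannel;
import org.apache.hbase.thirdparty.io.netty.channel.epoll.Epoll;
import org.apache.hbase.thirdparty.io.netty.channel.epoll.EpollEventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.epoll.EpollServerSocketChannel;
import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioServerSocketChannel;

public class TransportChoiceSketch {
  private final EventLoopGroup group;
  private final Class<? extends ServerChannel> serverChannelClass;

  TransportChoiceSketch(int threads) {
    // Prefer the native epoll transport when its JNI library loads; fall back to NIO.
    if (Epoll.isAvailable()) {
      group = new EpollEventLoopGroup(threads);
      serverChannelClass = EpollServerSocketChannel.class;
    } else {
      group = new NioEventLoopGroup(threads);
      serverChannelClass = NioServerSocketChannel.class;
    }
  }

  EventLoopGroup group() { return group; }
  Class<? extends ServerChannel> serverChannelClass() { return serverChannelClass; }
}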

View File

@@ -158,4 +158,36 @@
<name>hbase.hconnection.threads.keepalivetime</name>
<value>3</value>
</property>
<property>
<name>hbase.netty.worker.count</name>
<value>5</value>
<description>Default is 0</description>
</property>
<property>
<name>hbase.hconnection.threads.max</name>
<value>6</value>
<description>Default is 256</description>
</property>
<property>
<name>hbase.htable.threads.max</name>
<value>6</value>
<description>Default is MAX_INTEGER</description>
</property>
<property>
<name>hbase.region.replica.replication.threads.max</name>
<value>10</value>
<description>Default is 256</description>
</property>
<property>
<name>dfs.datanode.handler.count</name>
<value>3</value>
<description>Default is 10</description>
</property>
<property>
<name>hbase.rest.threads.max</name>
<value>5</value>
<description>Default is 100</description>
</property>
</configuration>
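
Note the split in how these settings are consumed: the entries above are Hadoop Configuration properties read from hbase-site.xml, whereas hbase.netty.eventloop.default.pool from the earlier hunks is a JVM system property read with Integer.getInteger, so it has to be passed with -D (or set via System.setProperty) rather than through site.xml. A sketch of the Configuration side follows; the class name is hypothetical and the defaults in the comments are the ones given in the descriptions above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class SiteXmlSketch {
  public static void main(String[] args) {
    // HBaseConfiguration.create() layers hbase-site.xml over hbase-default.xml.
    Configuration conf = HBaseConfiguration.create();
    System.out.println(conf.getInt("hbase.netty.worker.count", 0));                  // 5 with the values above
    System.out.println(conf.getInt("hbase.hconnection.threads.max", 256));           // 6 with the values above
    System.out.println(conf.getInt("hbase.htable.threads.max", Integer.MAX_VALUE));  // 6 with the values above
  }
}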

View File

@@ -586,6 +586,8 @@
<redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
<systemPropertyVariables>
<test.build.classes>${test.build.classes}</test.build.classes>
<!--For testing, set this to a low number: 5-->
<hbase.netty.eventloop.default.pool>5</hbase.netty.eventloop.default.pool>
</systemPropertyVariables>
<excludes>
<!-- users can add -D option to skip particular test classes