active953
java.lang.IllegalStateException: failed to create a child event loop
at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:99)
at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:68)
at io.netty.channel.MultithreadEventLoopGroup.<init>(MultithreadEventLoopGroup.java:49)
at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:100)
at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:71)
at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:56)
at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:41)
at com.bimatrix.revit.nettyTest.TcpClient.getBootstrap(TcpClient.java:35)
at com.bimatrix.revit.nettyTest.TcpClient.run(TcpClient.java:97)
at com.bimatrix.revit.controller.RevitController$1.run(RevitController.java:194)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: io.netty.channel.ChannelException: failed to open a new selector
at io.netty.channel.nio.NioEventLoop.openSelector(NioEventLoop.java:126)
at io.netty.channel.nio.NioEventLoop.<init>(NioEventLoop.java:118)
at io.netty.channel.nio.NioEventLoopGroup.newChild(NioEventLoopGroup.java:140)
at io.netty.channel.nio.NioEventLoopGroup.newChild(NioEventLoopGroup.java:31)
at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:95)
... 12 more
Caused by: java.io.IOException: Unable to establish loopback connection
at sun.nio.ch.PipeImpl$Initializer.run(PipeImpl.java:125)
at sun.nio.ch.PipeImpl$Initializer.run(PipeImpl.java:69)
at java.security.AccessController.doPrivileged(Native Method)
at sun.nio.ch.PipeImpl.<init>(PipeImpl.java:141)
at sun.nio.ch.SelectorProviderImpl.openPipe(SelectorProviderImpl.java:50)
at java.nio.channels.Pipe.open(Pipe.java:150)
at sun.nio.ch.WindowsSelectorImpl.<init>(WindowsSelectorImpl.java:127)
at sun.nio.ch.WindowsSelectorProvider.openSelector(WindowsSelectorProvider.java:44)
at io.netty.channel.nio.NioEventLoop.openSelector(NioEventLoop.java:124)
... 16 more
Caused by: java.net.SocketException: No buffer space available (maximum connections reached?): connect
at sun.nio.ch.Net.connect0(Native Method)
at sun.nio.ch.Net.connect(Net.java:465)
at sun.nio.ch.Net.connect(Net.java:457)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:670)
at java.nio.channels.SocketChannel.open(SocketChannel.java:184)
at sun.nio.ch.PipeImpl$Initializer.run(PipeImpl.java:97)
... 24 more
java.lang.IllegalStateException: failed to create a child event loop
at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:99)
at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:68)
at io.netty.channel.MultithreadEventLoopGroup.<init>(MultithreadEventLoopGroup.java:49)
at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:100)
at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:71)
at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:56)
at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:41)
at com.bimatrix.revit.nettyTest.TcpClient.getBootstrap(TcpClient.java:35)
at com.bimatrix.revit.nettyTest.TcpClient.run(TcpClient.java:97)
at com.bimatrix.revit.controller.RevitController$1.run(RevitController.java:194)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: io.netty.channel.ChannelException: failed to open a new selector
at io.netty.channel.nio.NioEventLoop.openSelector(NioEventLoop.java:126)
at io.netty.channel.nio.NioEventLoop.<init>(NioEventLoop.java:118)
at io.netty.channel.nio.NioEventLoopGroup.newChild(NioEventLoopGroup.java:140)
at io.netty.channel.nio.NioEventLoopGroup.newChild(NioEventLoopGroup.java:31)
at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:95)
... 12 more
Caused by: java.io.IOException: Unable to establish loopback connection
at sun.nio.ch.PipeImpl$Initializer.run(PipeImpl.java:125)
at sun.nio.ch.PipeImpl$Initializer.run(PipeImpl.java:69)
at java.security.AccessController.doPrivileged(Native Method)
at sun.nio.ch.PipeImpl.<init>(PipeImpl.java:141)
at sun.nio.ch.SelectorProviderImpl.openPipe(SelectorProviderImpl.java:50)
at java.nio.channels.Pipe.open(Pipe.java:150)
at sun.nio.ch.WindowsSelectorImpl.<init>(WindowsSelectorImpl.java:127)
at sun.nio.ch.WindowsSelectorProvider.openSelector(WindowsSelectorProvider.java:44)
at io.netty.channel.nio.NioEventLoop.openSelector(NioEventLoop.java:124)
... 16 more
Caused by: java.net.SocketException: No buffer space available (maximum connections reached?): connect
at sun.nio.ch.Net.connect0(Native Method)
at sun.nio.ch.Net.connect(Net.java:465)
at sun.nio.ch.Net.connect(Net.java:457)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:670)
at java.nio.channels.SocketChannel.open(SocketChannel.java:184)
at sun.nio.ch.PipeImpl$Initializer.run(PipeImpl.java:97)
... 24 more
java.lang.IllegalStateException: failed to create a child event loop
at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:99)
at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:68)
at io.netty.channel.MultithreadEventLoopGroup.<init>(MultithreadEventLoopGroup.java:49)
at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:100)
at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:71)
at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:56)
at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:41)
at com.bimatrix.revit.nettyTest.TcpClient.getBootstrap(TcpClient.java:35)
at com.bimatrix.revit.nettyTest.TcpClient.run(TcpClient.java:97)
at com.bimatrix.revit.controller.RevitController$1.run(RevitController.java:194)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: io.netty.channel.ChannelException: failed to open a new selector
at io.netty.channel.nio.NioEventLoop.openSelector(NioEventLoop.java:126)
at io.netty.channel.nio.NioEventLoop.<init>(NioEventLoop.java:118)
at io.netty.channel.nio.NioEventLoopGroup.newChild(NioEventLoopGroup.java:140)
at io.netty.channel.nio.NioEventLoopGroup.newChild(NioEventLoopGroup.java:31)
at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:95)
... 12 more
Caused by: java.io.IOException: Unable to establish loopback connection
at sun.nio.ch.PipeImpl$Initializer.run(PipeImpl.java:125)
at sun.nio.ch.PipeImpl$Initializer.run(PipeImpl.java:69)
at java.security.AccessController.doPrivileged(Native Method)
at sun.nio.ch.PipeImpl.<init>(PipeImpl.java:141)
at sun.nio.ch.SelectorProviderImpl.openPipe(SelectorProviderImpl.java:50)
at java.nio.channels.Pipe.open(Pipe.java:150)
at sun.nio.ch.WindowsSelectorImpl.<init>(WindowsSelectorImpl.java:127)
at sun.nio.ch.WindowsSelectorProvider.openSelector(WindowsSelectorProvider.java:44)
at io.netty.channel.nio.NioEventLoop.openSelector(NioEventLoop.java:124)
... 16 more
Caused by: java.net.SocketException: No buffer space available (maximum connections reached?): connect
at sun.nio.ch.Net.connect0(Native Method)
at sun.nio.ch.Net.connect(Net.java:465)
at sun.nio.ch.Net.connect(Net.java:457)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:670)
at java.nio.channels.SocketChannel.open(SocketChannel.java:184)
at sun.nio.ch.PipeImpl$Initializer.run(PipeImpl.java:97)
... 24 more
原因:socket 连接数不够用(缓冲区空间不足,可用连接数已达上限)。
从上面的分析来看,导致出现 No buffer space available 这一问题的原因是多方面的,原因以及解决办法如下:
- 从代码层面上看,webservice 或 httpclient 调用后未进行连接释放,导致资源无法回收。
解决办法是在 axis2 的客户端代码中进行连接关闭,如下:
stub._getServiceClient().cleanupTransport();
stub._getServiceClient().cleanup();
stub.cleanup();
stub = null;
及时的关闭和clean 能有效的避免内存溢出的问题,及时回收资源。
或者 httpClient 中,最终要在 finally 调用 response.close()或者httpPost.releaseConnection() 进行连接释放。
- 从系统层面上看,系统 socket 连接数设置不合理,socket 连接数过小,容易达到上限;其次是 2MSL 设置过长,容易积压 TIME_WAIT 状态的 TCP 连接。
解决办法是修改 Linux 内核参数,
修改系统 socket 最大连接数,在文件 /etc/security/limits.conf 最后加入下面两行:
* soft nofile 32768
* hard nofile 32768
或者缩小 2MSL 的时长、允许重用处于 TIME_WAIT状态的 TCP 连接、快速回收处于 TIME_WAIT状态的 TCP 连接,修改 /etc/sysctl.conf,添加如下几行:
# 修改系统默认的 TIMEOUT 时间
net.ipv4.tcp_fin_timeout=2
# 开启重用,允许将 TIME_WAIT 状态的 sockets 重新用于新的 TCP 连接,默认为 0 表示关闭
net.ipv4.tcp_tw_reuse=1
# 开启 TCP 连接中 TIME_WAIT sockets 的快速回收,默认为 0 表示关闭
# 注意:tcp_tw_recycle 在 NAT 环境下会导致客户端连接异常,且该参数已在 Linux 4.12 内核中被移除,新内核下请勿依赖此项
net.ipv4.tcp_tw_recycle=1
对于 windows 环境,可通过修改注册表进行配置:
\HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Services\Tcpip\Parameters
添加一个 DWORD 类型的值 TcpTimedWaitDelay ,值可以根据实际情况配置。
\HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Services\TCPIP\Parameters
添加一个 DWORD 类型的值 MaxUserPort ,值可以根据实际情况配置。
上面这些参数根据实际情况进行配置。
- 从 LVS(负载均衡)层面上看,调度算法不合理,导致请求过多地分配到某一台服务器上。
解决办法,根据实际情况指定合理的负载均衡解决方案。
- 从安全层面上看,当服务器遭到 DDoS(分布式拒绝服务)攻击时,服务器会大量积压 TIME_WAIT 状态的 TCP 连接而无法对外提供服务。
解决办法,加强安全防护。
PS:我的问题最终是通过修改 Windows 注册表解决的。
注意:要在 Parameters 当前节点下面直接添加参数,不要在其子节点下面添加。