gevent是第三方库,通过greenlet实现协程,其基本思想是:
当一个greenlet遇到IO操作时,比如访问网络,就自动切换到其他的greenlet,等到IO操作完成,再在适当的时候切换回来继续执行。由于IO操作非常耗时,经常使程序处于等待状态,有了gevent为我们自动切换协程,就保证总有greenlet在运行,而不是等待IO。
由于切换是在IO操作时自动完成,所以gevent需要修改Python自带的一些标准库,这一过程在启动时通过monkey patch完成:
# -*- coding: utf-8 -*-
'''
Created on 2016/6/30
@author: wwhhff11
'''
from gevent import monkey
import gevent
# Patch the stdlib socket module so that blocking socket I/O yields to
# other greenlets instead of stalling the (single) OS thread.
monkey.patch_socket()
# The bare string below is the author's note: "there is only 1 thread".
"""
线程数只有1个
"""
def f(n):
for i in range(n):
print gevent.getcurrent(), i
gevent.sleep(0)
# Launch three greenlets that each count to 5, then wait until every
# one has finished.  Their output interleaves because f() yields on
# each iteration, even though only a single OS thread is running.
greenlets = [gevent.spawn(f, 5) for _ in range(3)]
for g in greenlets:
    g.join()
# -*- coding: utf-8 -*-
'''
Created on 2016/6/30
@author: wwhhff11
'''
from gevent import monkey
import gevent
import urllib2
from time import time
from gevent.pool import Pool
from gevent.threadpool import ThreadPool
# Patch the whole stdlib (socket, ssl, time, threading, ...) so that
# the blocking urllib2 calls below become cooperative.
monkey.patch_all()
def f(url):
print 'GET: %s' % url
respone = urllib2.urlopen(url)
data = respone.read()
print '%d bytes received from %s.' % (len(data), url)
start=time()
# 7.55209708214
gevent.joinall([gevent.spawn(f,"http://www.qq.com") for i in xrange(100)])
# 35.4669699669
# for i in xrange(100):
# f("http://www.qq.com")
end=time()
print end-start
# -*- coding: utf-8 -*-
'''
Created on 2016/6/30
@author: wwhhff11
'''
from gevent import monkey
import gevent
import urllib2
from time import time
from gevent.pool import Pool
from gevent.threadpool import ThreadPool
from gevent.queue import JoinableQueue, Empty
# Shared work queue feeding URLs to the html_reader workers.
queue = JoinableQueue()
# NOTE(review): looks like an intended poison-pill sentinel for the
# readers, but it is never used — readers exit on Empty instead.
STOP="stop"
# Patch the stdlib so the urllib2 calls become cooperative.
monkey.patch_all()
def html_reader():
while True:
try:
url=queue.get(0)
print 'GET: %s' % url
respone = urllib2.urlopen(url)
data = respone.read()
print '%d bytes received from %s.' % (len(data), url)
except Empty:
break
start=time()
readers = [gevent.spawn(html_reader) for i in xrange(10)]
for i in range(100):
queue.put("http://www.qq.com")
gevent.joinall(readers)
end=time()
print end-start
# -*- coding: utf-8 -*-
'''
Created on 2016/6/30
@author: wwhhff11
'''
from gevent import monkey
import gevent
import urllib2
from time import time
from gevent.pool import Pool
from gevent.threadpool import ThreadPool
from gevent.queue import JoinableQueue, Empty
# Shared work queue feeding URLs to the html_reader workers.
queue = JoinableQueue()
# NOTE(review): looks like an intended poison-pill sentinel for the
# readers, but it is never used — readers exit on Empty instead.
STOP="stop"
# Patch the stdlib so the urllib2 calls become cooperative.
monkey.patch_all()
def html_reader():
while True:
try:
url=queue.get(0)
print 'GET: %s' % url
respone = urllib2.urlopen(url)
data = respone.read()
print '%d bytes received from %s.' % (len(data), url)
except Empty:
break
start=time()
pool=ThreadPool(10)
for i in range(100):
queue.put("http://www.qq.com")
for i in xrange(10):
pool.spawn(html_reader)
pool.join()
end=time()
print end-start
实测:html_reader 工作者数为 1 时总耗时约 33 秒;
增加到 10 个后降至约 6 秒。