# 1. yield实现协程
def consumer(name):
    """Generator-based coroutine: repeatedly receive a value via send()."""
    print('开始吃包子...')
    while True:
        print('[consumer]%s需要包子'%name)
        # Suspend here; resumes with the value the producer send()s in.
        received = yield
        print('[%s]吃了%s个包子'%(name, received))
def producer(obj1):
    """Drive a consumer coroutine: prime it, then push values 1..3."""
    # A fresh generator must first be advanced with send(None)
    # before it can accept real values.
    obj1.send(None)
    for count in range(1, 4):
        print('[producer]正在做%s个包子'%count)
        obj1.send(count)
if __name__ == '__main__':
    # Wire a producer to a consumer coroutine, then shut the generator down.
    conn = consumer("张三")
    producer(conn)
    conn.close()
# 2. gevent实现协程
def foo():
    """Print, cooperatively sleep for 1s (control switches to bar), resume."""
    print('running in foo')
    # gevent.sleep yields to the hub, letting the other greenlet run.
    gevent.sleep(1)
    # Fixed message typo: "com back ... in to" -> "come back ... into".
    print('come back from bar into foo')
def bar():
    """Print, cooperatively sleep for 1s (control switches to foo), resume."""
    print('running in bar')
    # gevent.sleep yields to the hub, letting the other greenlet run.
    gevent.sleep(1)
    # Fixed message typo: "com back ... in to" -> "come back ... into".
    print('come back from foo into bar')
if __name__ == '__main__':
    # Spawn both greenlets and block until each has finished.
    greenlets = [gevent.spawn(foo), gevent.spawn(bar)]
    gevent.joinall(greenlets)
# 3. 线程函数同步与异步
def task(pid):
    """Simulate one unit of work: cooperatively wait 1s, then report done."""
    gevent.sleep(1)
    print('task %s done'%pid)
def synchronous():
    """Run the tasks one after another in the current greenlet (sequential)."""
    # Align with asynchronous(): run the same 10 task ids (0-9) so the
    # sync/async timing comparison is fair (original ran only ids 1-9).
    for i in range(10):
        task(i)
def asynchronous():
    """Spawn every task as its own greenlet so their sleeps overlap."""
    jobs = [gevent.spawn(task, n) for n in range(10)]
    gevent.joinall(jobs)
if __name__ == '__main__':
    print('synchronous:')
    # Sequential: each task blocks until the previous one finishes.
    synchronous()
    print('asynchronous:')
    # Concurrent: the 1s sleeps overlap, so total time is roughly one task's.
    asynchronous()
# 4. 爬虫异步IO阻塞切换
# gevent monkey-patching should run as early as possible, before other
# modules import and bind the blocking stdlib primitives (sockets etc.),
# so the patched, cooperative versions are the ones actually used.
from gevent import monkey
monkey.patch_all()

import time

import gevent
from urllib import request
def url_request(url):
    """Fetch *url* and print how many bytes the response body contained."""
    print('get:%s'%url)
    # Use a context manager so the HTTP connection is closed deterministically
    # (the original leaked the response object / underlying socket).
    with request.urlopen(url) as resp:
        data = resp.read()
    print('%s bytes received from %s'%(len(data),url))
if __name__ == '__main__':
    # With monkey-patched IO, the three downloads overlap their waits.
    async_time_start = time.time()
    jobs = [
        gevent.spawn(url_request, 'https://www.python.org/'),
        gevent.spawn(url_request, 'https://www.nginx.org/'),
        gevent.spawn(url_request, 'https://www.ibm.com'),
    ]
    gevent.joinall(jobs)
    print('haoshi:', time.time() - async_time_start)