Coroutines, part 1. Link: https://blog.csdn.net/weixin_42394170/article/details/95939620
asyncio
- A standard-library module introduced in Python 3.4, with built-in support for asynchronous IO
- asyncio itself is an event (message) loop
- Steps (see the minimal sketch below):
    - Create the event loop
    - Put coroutines into the loop
    - Close the loop
- Case v07
- Case v08: two tasks
- Case v09: fetch several websites
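A minimal sketch of the three steps, in the same pre-async/await style as case v07 below (the hello coroutine is just a placeholder that sleeps):

import asyncio

@asyncio.coroutine
def hello():
    yield from asyncio.sleep(1)

# 1. Create the event loop
loop = asyncio.get_event_loop()
# 2. Put the coroutine into the loop and run it to completion
loop.run_until_complete(hello())
# 3. Close the loop
loop.close()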
async and await
- Introduced to express asynchronous IO more clearly
- Available since Python 3.5
- Makes coroutine code more concise
- In practice it is a simple substitution (see the sketch below):
    - Replace @asyncio.coroutine with async (i.e. async def)
    - Replace yield from with await
- Case v10
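A minimal before/after sketch of the substitution; both coroutine bodies are placeholders that just sleep:

import asyncio

# Old style (Python 3.4): decorator plus yield from
@asyncio.coroutine
def old_style():
    yield from asyncio.sleep(1)

# New style (Python 3.5+): async def replaces the decorator,
# await replaces yield from
async def new_style():
    await asyncio.sleep(1)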
aiohttp
- Introduction
    - asyncio implements single-threaded concurrent IO, which is of limited use on the client side
    - On the server side, asyncio plus coroutines work well together, because HTTP is an IO operation
    - asyncio implements the TCP, UDP, SSL and other protocols
    - aiohttp is an HTTP framework built on top of asyncio
- pip install aiohttp
- Case v11 (an aiohttp server; a minimal client sketch follows below)
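Case v11 below shows the server side. For completeness, a minimal client sketch using aiohttp's ClientSession, pointed at the v11 server address (the URL is an assumption):

import asyncio
import aiohttp

async def fetch(url):
    # One session per program is the usual pattern; both the session
    # and the response are async context managers
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            return await resp.text()

loop = asyncio.get_event_loop()
html = loop.run_until_complete(fetch('http://127.0.0.1:8000/'))
print(html)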
concurrent.futures
- A library added in Python 3
- Similar to the thread-pool concept in other languages
- This module can use multiprocessing to achieve true parallel computation
- Core principle: concurrent.futures runs multiple Python interpreters in parallel as child processes, which lets a Python program use multiple CPU cores to speed up execution. Because the child processes are separate from the main interpreter, their global interpreter locks are independent of each other, and each child process can fully use one CPU core.
- concurrent.futures.Executor
    - ThreadPoolExecutor
    - ProcessPoolExecutor
    - submit(fn, *args, **kwargs)
        - fn: the function to execute asynchronously
        - args, kwargs: arguments passed to fn
# Official deadlock example
import time
from concurrent.futures import ThreadPoolExecutor

def wait_on_b():
    time.sleep(5)
    print(b.result())  # b never completes: it is waiting for a's result
    return 5

def wait_on_a():
    time.sleep(5)
    print(a.result())  # likewise, a never completes: it is waiting for b's result
    return 6

executor = ThreadPoolExecutor(max_workers=2)
a = executor.submit(wait_on_b)
b = executor.submit(wait_on_a)
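For contrast, a sketch of the same pool where the two tasks are independent (the work function is a made-up placeholder), so neither future waits on the other and no deadlock occurs:

import time
from concurrent.futures import ThreadPoolExecutor

def work(n):
    time.sleep(1)
    return n * 2

executor = ThreadPoolExecutor(max_workers=2)
a = executor.submit(work, 5)
b = executor.submit(work, 6)
print(a.result(), b.result())  # 10 12
executor.shutdown()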
- Case v12
- map(fn, *iterables, timeout=None)
    - Similar to the built-in map function
    - The function is executed asynchronously
    - timeout: timeout in seconds
    - Case v13
    - Case v14
    - The results can be collected into a list with list(); the data is then taken out of that list, e.g. (a runnable sketch follows below):
      with concurrent.futures.ThreadPoolExecutor(max_workers=3) as executor:
          print(list(executor.map(sleeper, x)))
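A runnable version of that one-liner; sleeper and x are not defined in the original note, so the definitions here are assumptions for illustration:

import time
import concurrent.futures

def sleeper(n):
    time.sleep(n)
    return n

x = [1, 2, 3]
# The three sleeps run concurrently; list() collects the results in order
with concurrent.futures.ThreadPoolExecutor(max_workers=3) as executor:
    print(list(executor.map(sleeper, x)))  # [1, 2, 3]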
- Choose between submit and map according to your needs
- Case v15
- Future
    - A task to be completed in the future
    - Future instances are created by Executor.submit (see the callback sketch below)
    - Case v16
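A minimal sketch of a Future created by submit, showing done(), result() and add_done_callback() from the standard concurrent.futures API (the task function is a placeholder):

import time
from concurrent.futures import ThreadPoolExecutor

def task(n):
    time.sleep(1)
    return n * n

def on_done(future):
    # Called with the finished future once it completes
    print('callback got:', future.result())

with ThreadPoolExecutor(max_workers=1) as executor:
    f = executor.submit(task, 4)
    f.add_done_callback(on_done)
    print(f.done())    # likely False: still running
    print(f.result())  # blocks until the result (16) is ready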
**v07**
import asyncio

@asyncio.coroutine
def hello():
    print("Hello world!")
    # Asynchronously call asyncio.sleep(3):
    print("Start......")
    r = yield from asyncio.sleep(3)
    print("Done....")
    print("Hello again!")

# Get the EventLoop:
loop = asyncio.get_event_loop()
# Run the coroutine
loop.run_until_complete(hello())
loop.close()
**v08**
import threading
import asyncio

@asyncio.coroutine
def hello():
    print('Hello world! (%s)' % threading.currentThread())
    print('Start..... (%s)' % threading.currentThread())
    yield from asyncio.sleep(10)
    print('Done..... (%s)' % threading.currentThread())
    print('Hello again! (%s)' % threading.currentThread())

loop = asyncio.get_event_loop()
tasks = [hello(), hello()]
loop.run_until_complete(asyncio.wait(tasks))
loop.close()
**v09**
import asyncio

@asyncio.coroutine
def wget(host):
    print('wget %s...' % host)
    connect = asyncio.open_connection(host, 80)
    reader, writer = yield from connect
    header = 'GET / HTTP/1.0\r\nHost: %s\r\n\r\n' % host
    writer.write(header.encode('utf-8'))
    yield from writer.drain()
    while True:
        line = yield from reader.readline()
        if line == b'\r\n':
            break
        print('%s header > %s' % (host, line.decode('utf-8').rstrip()))
    # Ignore the body, close the socket
    writer.close()

loop = asyncio.get_event_loop()
tasks = [wget(host) for host in ['www.sina.com.cn', 'www.sohu.com', 'www.163.com']]
loop.run_until_complete(asyncio.wait(tasks))
loop.close()
**v10**
import threading
import asyncio

# @asyncio.coroutine replaced by async def
async def hello():
    print('Hello world! (%s)' % threading.currentThread())
    print('Start..... (%s)' % threading.currentThread())
    await asyncio.sleep(10)
    print('Done..... (%s)' % threading.currentThread())
    print('Hello again! (%s)' % threading.currentThread())

loop = asyncio.get_event_loop()
tasks = [hello(), hello()]
loop.run_until_complete(asyncio.wait(tasks))
loop.close()
**v11**
import asyncio
from aiohttp import web

async def index(request):
    await asyncio.sleep(0.5)
    return web.Response(body=b'<h1>Index</h1>')

async def hello(request):
    await asyncio.sleep(0.5)
    text = '<h1>hello, %s!</h1>' % request.match_info['name']
    return web.Response(body=text.encode('utf-8'))

async def init(loop):
    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', index)
    app.router.add_route('GET', '/hello/{name}', hello)
    srv = await loop.create_server(app.make_handler(), '127.0.0.1', 8000)
    print('Server started at http://127.0.0.1:8000...')
    return srv

loop = asyncio.get_event_loop()
loop.run_until_complete(init(loop))
loop.run_forever()
**v12**
from concurrent.futures import ThreadPoolExecutor
import time

def return_future(msg):
    time.sleep(3)
    return msg

# Create a thread pool
pool = ThreadPoolExecutor(max_workers=2)
# Submit two tasks to the pool
f1 = pool.submit(return_future, 'hello')
f2 = pool.submit(return_future, 'world')
print(f1.done())
time.sleep(3)
print(f2.done())
print(f1.result())
print(f2.result())
**v13**
import time
from concurrent import futures

data = ['1', '2']

def wait_on(argument):
    print(argument)
    time.sleep(2)
    return "ok"

ex = futures.ThreadPoolExecutor(max_workers=2)
for i in ex.map(wait_on, data):
    print(i)
**v14**
from concurrent.futures import ThreadPoolExecutor as Pool
# import requests
from urllib import request
import time

URLS = ['http://www.baidu.com', 'http://qq.com', 'http://sina.com']

def task(url, timeout=20):
    # return requests.get(url, timeout=timeout)
    return request.urlopen(url, timeout=timeout)

pool = Pool(max_workers=3)
results = pool.map(task, URLS)
# Wait for the downloads to finish before reading the results
time.sleep(20)
for ret in results:
    print('%s, %s' % (ret.url, len(ret.read())))
**v15**
import time, re, fcntl
import os, datetime
from concurrent import futures

count_list = list()
MinuteNum = 1
StartTime = datetime.datetime(2018, 5, 1, 19, 31, 0, 484870)
NowTime = datetime.datetime.now()
os.system(':>new.txt')
f_new = open('new.txt', 'a')

def conc(CountTimeFormat):
    f = open('push_slave.stdout', 'r')
    for line in f.readlines():
        if re.search(CountTimeFormat, line):
            # Acquire an exclusive lock on the file
            fcntl.flock(f_new, fcntl.LOCK_EX)
            f_new.writelines(line)
            f_new.flush()
            # Release the file lock
            fcntl.flock(f_new, fcntl.LOCK_UN)
            break

while 1:
    AfterOneMinute = datetime.timedelta(minutes=MinuteNum)
    CountTime = AfterOneMinute + StartTime
    CountTimeFormat = CountTime.strftime('%Y-%m-%d %H:%M')
    MinuteNum = MinuteNum + 1
    count_list.append(CountTimeFormat)
    # strftime zero-pads the day, so the sentinel must be "05-02", not "05-2"
    if CountTimeFormat == "2018-05-02 16:00":
        break

def exec_cmd():
    with futures.ProcessPoolExecutor(max_workers=24) as executor:
        dict((executor.submit(conc, times), times) for times in count_list)

if __name__ == '__main__':
    exec_cmd()
    f_new.close()
**v16**
from concurrent.futures import ThreadPoolExecutor as Pool
from concurrent.futures import as_completed
import requests

URLS = ['http://qq.com', 'http://sina.com', 'http://www.baidu.com']

def task(url, timeout=10):
    return requests.get(url, timeout=timeout)

with Pool(max_workers=3) as executor:
    future_tasks = [executor.submit(task, url) for url in URLS]
    for f in future_tasks:
        if f.running():
            print('%s is running' % str(f))
    for f in as_completed(future_tasks):
        try:
            ret = f.done()
            if ret:
                f_ret = f.result()
                print('%s, done, result: %s, %s' % (str(f), f_ret.url, len(f_ret.content)))
        except Exception as e:
            f.cancel()
            print(str(e))