import time
import asyncio
import aiohttp
# Wall-clock start time, used at the bottom of the script to report the
# total duration of all concurrent requests.
start = time.time()

# Pages to fetch concurrently. (Fixed: the first URL previously contained a
# trailing space inside the string literal, which corrupts the request URL.)
urls = [
    'https://www.baidu.com/',
    'https://www.kugou.com/',
    'https://www.bootcss.com/',
]
async def get_page(url):
    """Fetch *url* asynchronously and print the response body as text.

    Parameters
    ----------
    url : str
        Address of the page to request.
    """
    async with aiohttp.ClientSession() as session:
        # Both session.get() and session.post() accept headers=, params=,
        # data=, and proxy='http://ip:port', similar to the requests API.
        # Fixed: no extra `await` before session.get() — the request context
        # manager awaits the response itself when used with `async with`.
        async with session.get(url) as response:
            # response.text() -> str, response.read() -> bytes,
            # response.json() -> parsed JSON object.
            # All three are coroutines and must be awaited before use.
            page_text = await response.text()
            print(page_text)
async def _main():
    """Fetch every URL in *urls* concurrently and print each page body."""
    # gather() schedules all coroutines at once and waits for them all,
    # propagating the first exception if any request fails.
    await asyncio.gather(*(get_page(url) for url in urls))

# asyncio.run() (Python 3.7+) creates and closes the event loop itself,
# replacing the now-deprecated get_event_loop()/run_until_complete() pattern.
asyncio.run(_main())

# Total elapsed time: roughly the slowest single request rather than the
# sum of all of them, because the requests overlap.
print(time.time() - start)
# Multi-task coroutine demo.
# (Scraped blog footer, commented out so the file parses; original text:
# "多任务协程 / 最新推荐文章于 2024-07-16 05:33:21 发布")