import aiohttp
import asyncio
async def main():
    """Fetch httpbin's /get echo endpoint, aborting after a 1.5 s total timeout.

    Prints the response body on success, or a timeout message when the
    whole request (connect + read) exceeds the deadline.
    """
    deadline = aiohttp.ClientTimeout(total=1.5)
    target = 'https://www.httpbin.org/get'
    async with aiohttp.ClientSession(timeout=deadline) as session:
        try:
            async with session.get(target) as response:
                body = await response.text()
                print(body)
        except asyncio.TimeoutError:
            # aiohttp surfaces an exceeded ClientTimeout as asyncio.TimeoutError.
            print("超时了")
if __name__ == '__main__':
    # asyncio.run() is the modern entry point: it creates a fresh event
    # loop, runs the coroutine to completion, and closes the loop.  The
    # original get_event_loop().run_until_complete() pattern is deprecated
    # (get_event_loop() warns when no loop is running since Python 3.10)
    # and leaves the loop open.
    asyncio.run(main())
借助asyncio的Semaphore控制并发量
import aiohttp
import asyncio
# Maximum number of requests allowed to run at the same time.
concurrent = 5
# Target fetched by every scrape_api() task.
url = 'https://www.baidu.com'
# Shared ClientSession; presumably created in main() further down — verify,
# since scrape_api() dereferences it and would fail on None.
session = None
# Gate limiting concurrent scrape_api() bodies to `concurrent` at once.
# NOTE(review): created at import time, outside any running event loop —
# on Python < 3.10 this binds the semaphore to get_event_loop()'s loop,
# which can mismatch the loop asyncio.run() creates; confirm target version.
semaphore = asyncio.Semaphore(concurrent)
async def scrape_api(i):
    """Fetch the module-level ``url`` once, gated by the shared semaphore.

    Parameters: i -- task index, used only in the progress log line.
    Returns the response body decoded as text.
    """
    # Acquire a slot first, so at most `concurrent` bodies execute at once.
    async with semaphore:
        print('爬取', url, i)
        async with session.get(url) as resp:
            # Hold the slot (and the connection) for a second so the
            # throttling effect is observable in the demo.
            await asyncio.sleep(1)
            body = await resp.text()
            return body
async def main():