Limiting async concurrency in Python (asyncio)

When requests are fired off asynchronously they can open a very large number of concurrent connections. This needs to be limited, otherwise the API you are calling will rate-limit you or simply be overwhelmed.

Keep a fixed-size set of pending tasks

import asyncio
import random

async def download(code):
    wait_time = random.randint(1, 3)
    print('downloading {} will take {} second(s)'.format(code, wait_time))
    # this is where the request blocks
    await asyncio.sleep(wait_time)  # I/O, context will switch to main function
    print('downloaded {}'.format(code))

async def main(loop):
    # concurrency limit
    no_concurrent = 3
    # set of in-flight tasks; together with no_concurrent it caps concurrency
    dltasks = set()
    i = 0
    # 9 tasks in total
    while i < 9:
        if len(dltasks) >= no_concurrent:
            # Wait for some download to finish before adding a new one
            _done, dltasks = await asyncio.wait(
                dltasks, return_when=asyncio.FIRST_COMPLETED)
        dltasks.add(loop.create_task(download(i)))
        i += 1
    # Wait for the remaining downloads to finish
    await asyncio.wait(dltasks)

A new task is added only while the set is below the limit. With return_when=asyncio.FIRST_COMPLETED, asyncio.wait returns as soon as any download finishes and hands back the set of still-pending tasks, so whenever one task leaves the set a new one can be added. That is how the concurrency limit is enforced.
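
For completeness, a minimal sketch of an entry point for this version (it mirrors the loop-based driver used in the Semaphore example further down; download() and main() are the definitions above):

if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    try:
        # main() receives the loop so it can create tasks on it
        loop.run_until_complete(main(loop))
    finally:
        loop.close()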

Using an asyncio Queue

# download() defined as above

async def download_worker(q):
    while True:
        # get the next item from the queue
        code = await q.get()
        await download(code)
        # mark the item as processed
        q.task_done()

async def main(loop):
    # create an asyncio queue
    q = asyncio.Queue()
    # spawn three worker tasks
    workers = [loop.create_task(download_worker(q)) for _ in range(3)]
    i = 0
    while i < 9:
        # 9 items in total, push them all onto the queue
        await q.put(i)
        i += 1
    # block until every queued item has been processed
    await q.join()  # wait for all tasks to be processed
    # cancel the workers (they loop forever)
    for worker in workers:
        worker.cancel()
    # wait for the cancelled workers to finish; return_exceptions=True returns the CancelledError instead of raising it
    await asyncio.gather(*workers, return_exceptions=True)
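
One detail worth noting: asyncio.Queue also accepts a maxsize argument. With the unbounded queue above all nine items are enqueued immediately and only the workers are throttled; with a bounded queue, await q.put(i) itself suspends once the queue is full, which also applies backpressure to the producer. A minimal sketch of that variant, reusing the names from the example above:

async def main(loop):
    # at most 3 items may wait in the queue at any time
    q = asyncio.Queue(maxsize=3)
    workers = [loop.create_task(download_worker(q)) for _ in range(3)]
    for i in range(9):
        await q.put(i)  # suspends here once the queue is full
    await q.join()
    for worker in workers:
        worker.cancel()
    await asyncio.gather(*workers, return_exceptions=True)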

Another option is asyncio's built-in Semaphore

import asyncio
from random import randint

async def download(code):
    wait_time = randint(1, 3)
    print('downloading {} will take {} second(s)'.format(code, wait_time))
    await asyncio.sleep(wait_time)  # I/O, context will switch to main function
    print('downloaded {}'.format(code))

# sem is the concurrency limit
sem = asyncio.Semaphore(3)

# acquire sem with `async with` to limit the downloads
async def safe_download(i):
    async with sem:  # semaphore limits num of simultaneous downloads
        return await download(i)

async def main():
    # create one task per download; creating a task starts the coroutine
    tasks = [
        asyncio.ensure_future(safe_download(i))
        for i in range(9)
    ]
    # wait until every download has finished
    await asyncio.gather(*tasks)


if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(main())
    finally:
        loop.run_until_complete(loop.shutdown_asyncgens())
        loop.close()
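
One caveat: sem is created at module level, outside any running event loop. That works with the get_event_loop()/run_until_complete driver above, but on Python versions before 3.10 a module-level semaphore can end up attached to a different loop if the code is instead driven with asyncio.run(). A minimal sketch of the same idea in the asyncio.run style (assuming Python 3.7+), creating the semaphore inside the coroutine:

import asyncio

# download() defined as above

async def safe_download(sem, i):
    async with sem:  # semaphore limits num of simultaneous downloads
        return await download(i)

async def main():
    # created inside the running loop, so it cannot bind to the wrong one
    sem = asyncio.Semaphore(3)
    await asyncio.gather(*(safe_download(sem, i) for i in range(9)))

if __name__ == '__main__':
    asyncio.run(main())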

Wrapping the limit in a decorator

import asyncio
from functools import wraps
def request_concurrency_limit_decorator(limit=3):
    # Bind the default event loop 
    sem = asyncio.Semaphore(limit)

    def executor(func):
        @wraps(func)
        async def wrapper(*args, **kwargs):
            async with sem:
                return await func(*args, **kwargs)

        return wrapper

    return executor

# Usage
@request_concurrency_limit_decorator(limit=...)
async def download(...):
    ...
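
As a concrete illustration, a minimal sketch that applies the decorator to the download() coroutine used throughout this post:

import asyncio
import random

@request_concurrency_limit_decorator(limit=3)
async def download(code):
    wait_time = random.randint(1, 3)
    print('downloading {} will take {} second(s)'.format(code, wait_time))
    await asyncio.sleep(wait_time)
    print('downloaded {}'.format(code))

async def main():
    # at most 3 downloads run at the same time
    await asyncio.gather(*(download(i) for i in range(9)))

loop = asyncio.get_event_loop()
loop.run_until_complete(main())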

Third-party libraries

Another approach is to use a third-party library, for example asyncio-pool:

from asyncio_pool import AioPool  # assumes the asyncio-pool package is installed

LIST_OF_URLS = ("http://www.google.com", "......")

async def main():
    pool = AioPool(size=3)
    await pool.map(your_download_coroutine, LIST_OF_URLS)

https://github.com/gistart/asyncio-pool

Reference: https://stackoverflow.com/questions/48483348/how-to-limit-concurrency-with-python-asyncio
