Python 配置 Celery 执行异步任务:只要配置好以后,就可以应用到任何耗时任务当中,非常好用。
tasks.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Celery task definitions: a sample long-running task (work_func)."""
import time

from celery import Celery

# Broker (Redis DB 14) carries the task messages; backend (Redis DB 15)
# stores the task results.
celery = Celery("ShiChuang", broker="redis://127.0.0.1:6379/14", backend='redis://127.0.0.1:6379/15')


@celery.task
def work_func(x, y):
    """Simulate a slow job: sleep 20 seconds, then return x + y."""
    # Deliberate delay so the asynchronous hand-off is observable.
    time.sleep(20)
    print("2222222222222222222")
    return x + y

# Worker start-up command:
# celery -A ShiChuang.tasks.main worker --loglevel=info -P gevent
config.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Celery worker configuration (loaded via app.config_from_object)."""
# Message broker: task messages are queued in Redis DB 14.
BROKER_URL="redis://127.0.0.1:6379/14"
# Result backend: task return values are stored in Redis DB 15.
CELERY_RESULT_BACKEND="redis://127.0.0.1:6379/15"
# Recycle each worker process after it has executed this many tasks
# (a common guard against memory leaks in long-running workers).
CELERYD_MAX_TASKS_PER_CHILD = 40
# Whether to discard task results; False means results ARE written to
# the result backend (Redis DB 15).
CELERY_IGNORE_RESULT = False
main.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Celery application entry point.

Started with: celery -A ShiChuang.tasks.main worker --loglevel=info -P gevent
"""
from celery import Celery
app=Celery("ShiChuang")
# Load worker settings (broker, backend, recycling limits) from the
# dotted-path config module.
app.config_from_object("ShiChuang.tasks.config")
# Let Celery locate @task-decorated functions in the listed packages
# automatically.
app.autodiscover_tasks(["ShiChuang.tasks.tasks_tools"])
# NOTE: "ShiChuang" is the name of the directory one level above tasks/.
测试脚本 testing_celery.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Smoke-test script: enqueue work_func asynchronously and print its task id."""
from celery.result import AsyncResult
# Import the task explicitly instead of via "import *" so the origin of
# work_func is clear to readers and linters.
from ShiChuang.tasks.tasks_tools.tasks import work_func


def run():
    """Queue one work_func(10, 10) call and report its task id.

    .delay() returns immediately with an AsyncResult; the 20-second job
    runs in the worker process, so 'hello world' prints right away.
    """
    res = work_func.delay(10, 10)
    print(res.task_id)
    print('hello world')


# Guard so importing this module does not enqueue a task as a side effect.
if __name__ == "__main__":
    run()

# Fetch a result later by its task id:
# result = AsyncResult('e70b8878-9555-4b7b-a29c-2b2b73678312')  # 参数为task id
# print(result.result)