celery_server.py和mytasks.py在celery_demo目录下,
celery_demo目录下启动两个worker:
celery -A celery_server.myapp worker -l debug -Q default
celery -A celery_server.myapp worker -l debug -Q add_tasks
最后再运行 python mytasks.py,触发两个任务并轮询打印结果
代码文件:
#celery_server.py
from __future__ import absolute_import
from celery import Celery
from celery.signals import before_task_publish, after_task_publish
from kombu import Queue
myapp = Celery('maintasks')

# All settings applied in one batch: Redis DB 0 carries task messages,
# Redis DB 1 stores results; tasks live in the 'mytasks' module.
myapp.conf.update(
    broker_url='redis://localhost:6379/0',
    result_backend='redis://localhost:6379/1',
    imports=['mytasks'],
    worker_concurrency=1,
    # A string route value is shorthand for the destination queue name.
    task_routes={
        'add': 'add_tasks',
        'default': 'default',
    },
    task_queues=(
        Queue('default', routing_key='default_key'),
        Queue('add_tasks', routing_key='add_tasks_key'),
    ),
)
@before_task_publish.connect
def before_task_publish_handler(body=None, **kwargs):
    """Hook fired just before a task message is published.

    Drops a marker file as visible proof that the signal ran
    (NOTE(review): publish signals fire in the sending process,
    not the worker — confirm against the Celery signals docs).
    """
    with open('before_task_publish_handler', 'w') as marker:
        marker.write('before_task_publish_handler')
@after_task_publish.connect
def after_task_publish_handler(body=None, **kwargs):
    """Hook fired right after a task message has been published.

    Writes a marker file so the demo can show the signal fired.
    """
    with open('after_task_publish_handler', 'w') as marker:
        marker.write('after_task_publish_handler')
if __name__ == '__main__':
    # Running "python celery_server.py" hands control to the celery CLI
    # bound to this app (equivalent to invoking the `celery` command).
    myapp.start()
#mytasks.py
import time
from celery.result import AsyncResult
from celery_server import myapp
@myapp.task(serializer='json', name='add')
def add(x, y):
    """Return x + y after a 10-second pause (simulates slow work)."""
    time.sleep(10)
    return x + y
def async_add():
    """Queue add(4, 4) on the broker and return its task id."""
    print('async_add')
    return add.delay(4, 4).id
@myapp.task(serializer='json', name='default')
def default(x, y):
    """Return x + y after a 10-second pause (simulates slow work)."""
    time.sleep(10)
    return x + y
def async_default():
    """Queue default(5, 5) on the broker and return its task id."""
    print('async_default')
    return default.delay(5, 5).id
def get_result(task_id):
    """Poll a task once per second until it finishes, then print its result.

    Args:
        task_id: id string returned by ``delay()`` / ``apply_async()``.

    Side effects: prints the task state on every poll and the final
    outcome (return value on success, the exception on failure).
    """
    # Bind to our app explicitly rather than relying on the implicit
    # current_app, so the backend lookup is unambiguous.
    res = AsyncResult(task_id, app=myapp)
    while True:
        print(res.status)
        if res.ready():
            break
        time.sleep(1)
    print(res.result)
if __name__ == '__main__':
    # Publish both tasks first so each worker queue gets its job,
    # then block on each result in submission order.
    task_ids = [async_add(), async_default()]
    for tid in task_ids:
        get_result(tid)