windows下 pip install celery 后 celery可以直接使用,而在centos6.x中celery命令无法直接运行,那么怎么解决呢?我用如下方式解决:
在 tasks.py中 加入 如下函数
代码:
from celery.bin import worker as celery_worker
from celery import Celery, platforms
from datetime import datetime, timedelta
from celery.schedules import crontab
import time
from celery.task import periodic_task
# 动态添加任务
from importlib import import_module, reload
# Allow the worker to run as the root user (celery refuses by default on Linux).
platforms.C_FORCE_ROOT = True
# Message broker (RabbitMQ) and result backend (Redis) connection URLs.
broker = 'amqp://guest:guest@127.0.0.1:5672/'
backend = 'redis://127.0.0.1:6379/0'
# The application instance; tasks below are registered against it.
celery = Celery('tasks', broker=broker, backend=backend)
# Task modules celery imports on startup to register their tasks.
# NOTE(review): presumably 'task' / 'task.all_task' exist elsewhere in this
# project — confirm the package layout.
celery.conf.CELERY_IMPORTS = ['task', 'task.all_task']
# NOTE(review): the string below is commented-out example config for a static
# beat schedule that would run tasks.execute every second; kept for reference.
"""
celery.conf.update(
CELERYBEAT_SCHEDULE={
'perminute': {
'task': 'tasks.execute',
'schedule': timedelta(seconds=1),
'args': ['task.all_task.asyget'],
}
}
)
"""
def import_string(import_name):
    """Resolve a dotted (or ``module:attr``) path and return the object.

    Example: ``'task.all_task.asyget'`` or ``'task.all_task:asyget'`` returns
    the ``asyget`` attribute of module ``task.all_task``.

    Fix over the original: when a path component is a *submodule* that the
    parent package has not imported (``hasattr`` fails), ``reload`` alone
    cannot make it appear — we now try importing the dotted prefix first,
    and only fall back to ``reload`` (which refreshes a module so newly
    added functions become visible, supporting dynamic task addition).
    """
    import_name = str(import_name).replace(':', '.')
    modules = import_name.split('.')
    mod = import_module(modules[0])
    # ``depth`` is the number of path components consumed once ``comp`` is.
    for depth, comp in enumerate(modules[1:], start=2):
        if not hasattr(mod, comp):
            try:
                # The missing attribute may be an unimported submodule;
                # importing it also binds it on the parent package.
                import_module('.'.join(modules[:depth]))
            except ImportError:
                # Not a module: refresh ``mod`` in case ``comp`` was added
                # to its source after the first import.
                reload(mod)
        mod = getattr(mod, comp)
    return mod
@celery.task
def execute(func, *args, **kwargs):
    """Resolve *func* from its dotted path and call it with the given args.

    This is the generic task used by the (disabled) beat schedule above:
    the schedule passes a string path, and the actual callable is looked up
    at execution time.
    """
    target = import_string(func)
    return target(*args, **kwargs)
"""
@celery.task
def interval(func, seconds, args=(), task_id=None):
next_run_time = datetime.utcnow() + timedelta(seconds=seconds)
kwargs = dict(args=(func, seconds, args), eta=next_run_time)
if task_id is not None:
kwargs.update(task_id=task_id)
interval.apply_async(**kwargs)
func = import_string(func)
return func(*args)
"""
def worker_start():
    """Start a celery worker in-process.

    Workaround for environments (e.g. CentOS 6.x) where the ``celery``
    console script is not on PATH: drive ``celery.bin.worker`` directly.
    """
    options = {
        'broker': broker,
        'concurrency': 4,
        'traceback': False,
        'loglevel': 'INFO',
    }
    celery_worker.worker(app=celery).run(**options)
# Script entry point: ``python tasks.py`` starts the worker in-process.
if __name__ == '__main__':
    worker_start()
启用 celery beat 命令:
运行 python beat.py -A tasks --loglevel=info 即可启动 celery beat。
beat.py 代码:
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from functools import partial
from celery.platforms import detached, maybe_drop_privileges
from celery.bin.base import Command, daemon_options
__all__ = ['beat']
# NOTE(review): this module has no docstring, so HELP (= __doc__) is None
# here; in upstream celery the module docstring supplies the help text —
# confirm whether ``doc = HELP`` being None matters for your celery version.
HELP = __doc__
class beat(Command):
    """Start the beat periodic task scheduler.
    Examples:
    .. code-block:: console
    $ celery beat -l info
    $ celery beat -s /var/run/celery/beat-schedule --detach
    $ celery beat -S django
    The last example requires the :pypi:`django-celery-beat` extension
    package found on PyPI.
    """

    doc = HELP
    enable_config_from_cmdline = True
    supports_args = False

    def run(self, detach=False, logfile=None, pidfile=None, uid=None,
            gid=None, umask=None, workdir=None, **kwargs):
        """Construct and run the Beat service, optionally daemonized."""
        # Only drop privileges here when staying in the foreground —
        # ``detached`` takes care of uid/gid itself.
        if not detach:
            maybe_drop_privileges(uid=uid, gid=gid)
        # 'app' must not be forwarded to the Beat constructor.
        kwargs.pop('app', None)
        start_beat = partial(self.app.Beat,
                             logfile=logfile, pidfile=pidfile, **kwargs)
        if not detach:
            return start_beat().run()
        with detached(logfile, pidfile, uid, gid, umask, workdir):
            return start_beat().run()

    def add_arguments(self, parser):
        """Register beat-specific, daemon, and user-defined CLI options."""
        conf = self.app.conf
        bopts = parser.add_argument_group('Beat Options')
        bopts.add_argument('--detach', action='store_true', default=False)
        bopts.add_argument(
            '-s', '--schedule', default=conf.beat_schedule_filename)
        bopts.add_argument('--max-interval', type=float)
        bopts.add_argument('-S', '--scheduler')
        bopts.add_argument('-l', '--loglevel', default='WARN')
        daemon_options(parser, default_pidfile='celerybeat.pid')
        user_options = self.app.user_options['beat']
        if user_options:
            uopts = parser.add_argument_group('User Options')
            self.add_compat_options(uopts, user_options)
def main(app=None):
    """Command-line entry point mirroring ``celery beat``."""
    cmd = beat(app=app)
    cmd.execute_from_commandline()


if __name__ == '__main__':  # pragma: no cover
    main()
运行 celery worker : python tasks.py
运行 celery beat : python beat.py -A tasks --loglevel=info
这样就解决了 centos 6.x 中 celery worker 和 beat 命令无效的问题 ,其他的命令看自己需求来写