关于Django win10 Celery 使用
1.使用的版本和依赖
Django==2.2.16
celery==5.0.5
django-cors-headers==3.7.0
django_celery_beat==2.2.0
django-celery-results==2.2.0
django-redis==4.12.1
2.目录结构
├── application
│ ├── celery.py #使用文件
│ ├── __init__.py
│ ├── __pycache__
│ ├── settings.py #使用文件
│ ├── urls.py
│ ├── uwsgi.ini
│ └── wsgi.py
├── apps
│ ├── __init__.py
│ ├── __pycache__
│ └── web
├── conf
│ ├── env.py
├── __init__.py
├── manage.py
├── requirements.txt
celery.py #文件内容
import os
import django
from celery import Celery, platforms
# from celery.schedules import crontab
from django.conf import settings
from datetime import timedelta  # kept for the commented beat_schedule example below

# Must point at the Django settings module BEFORE django.setup() so that
# settings (and task autodiscovery) resolve correctly.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', "application.settings")
django.setup()
# Required on Windows: lets celery's prefork pool spawn worker processes.
os.environ.setdefault('FORKED_BY_MULTIPROCESSING', '1')

# (was f"dj_celery" — the f-string prefix had no placeholders, so a plain
# literal is equivalent)
app = Celery("dj_celery", broker=settings.BROKER_URL, backend=settings.BROKER_URL)
# Pull CELERY_* options from Django's settings module.
app.config_from_object('django.conf:settings')
# Discover tasks.py in every installed app.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
app.conf.beat_scheduler = 'django_celery_beat.schedulers:DatabaseScheduler'
platforms.C_FORCE_ROOT = True  # allow running the worker as root

# BUG FIX: these options were previously bare module-level variables in this
# file, which Celery never reads (config_from_object points at
# django.conf:settings, not at this module) — they silently had no effect.
# Apply them through app.conf with the Celery 5 lowercase names instead.
app.conf.update(
    worker_concurrency=20,
    # Prefetch: each worker reserves up to N tasks at once to cut the
    # per-task broker communication cost.
    worker_prefetch_multiplier=20,
    # Recycle a worker child after this many tasks (guards against leaks).
    worker_max_tasks_per_child=100,
    # Disable all rate limits; not recommended on constrained networks.
    worker_disable_rate_limits=True,
    enable_utc=False,
    timezone=settings.TIME_ZONE,
)
# NOTE(review): CELERYD_FORCE_EXECV was removed in Celery 5, and
# DJANGO_CELERY_BEAT_TZ_AWARE is a *Django* setting that must live in
# settings.py — both were dropped from this module; confirm settings.py
# carries DJANGO_CELERY_BEAT_TZ_AWARE.

app.conf.beat_schedule = {
    # 'add-every-60-seconds': {
    #     'task': 'apps.web.wcelery.tasks.notice_celery_delete',
    #     'schedule': timedelta(seconds=10),
    #     'args': (),
    # },
}
# __init__.py (contents) — re-export the Celery app so "application.app"
# is importable and tasks can use the shared instance.
from .celery import app
__all__ = ['app']  # app is the Celery application object defined in celery.py
# env.py (contents)
# ================================================= #
# **************  Redis configuration  ************ #
# ================================================= #
# Whether the Redis cache is enabled.
# NOTE: celery cannot be used without redis.
REDIS_ENABLE = True
REDIS_DB = 1
REDIS_HOST = '127.0.0.1'
REDIS_PORT = 6379
REDIS_PASSWORD = ''
# Redis DB number used by celery for periodic tasks.
CELERY_DB = 1
# settings.py (contents)
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'baby_app.apps.BabyAppConfig',
'bootstrap3',
'DjangoUeditor', # rich-text editor
'moment',
'rest_framework',
'rest_framework_swagger',
'django_filters',
'django_celery_beat', # added: database-backed periodic task scheduler
'django_celery_results', # added: stores task results
]
# Redis cache configuration (django-redis backend).
# NOTE(review): the LOCATION host 47.93.218.26 is hard-coded and does not use
# REDIS_HOST from env.py (which is 127.0.0.1) — confirm this is intentional.
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://47.93.218.26:6379/1",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
}
}
# Celery broker: Redis.
# FIX: the credential segment is now only emitted when a password is set — the
# old f-string always produced "redis://:@host", which Redis tolerates but is
# confusing.  locals().get(...) was replaced by globals().get(...): identical
# at module level, but explicit about where CELERY_DB comes from (env.py),
# with 2 as the fallback DB number if env.py did not define it.
_auth = f":{REDIS_PASSWORD}@" if REDIS_PASSWORD else ""
BROKER_URL = (
    f"redis://{_auth}{os.getenv('REDIS_HOST') or REDIS_HOST}:"
    f"{REDIS_PORT}/{globals().get('CELERY_DB', 2)}"
)
# Beat scheduler (NOT the result backend — the old comment was misleading).
# Colon form used for consistency with app.conf.beat_scheduler in celery.py.
CELERYBEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
CELERY_TIMEZONE = 'Asia/Shanghai'
CELERY_ENABLE_UTC = False
CELERY_WORKER_CONCURRENCY = 4  # number of concurrent workers
CELERY_ACKS_LATE = True  # acknowledge only after the task finishes
DJANGO_CELERY_BEAT_TZ_AWARE = False
CELERY_WORKER_MAX_TASKS_PER_CHILD = 5  # recycle worker after N tasks (mem-leak guard)
CELERY_TASK_TIME_LIMIT = 15 * 60  # hard per-task timeout, seconds
# 任务目录结构和具体的功能
.
├── __init__.py
├── __pycache__
│ └── __init__.cpython-38.pyc
└── web
├── wcelery
│ ├── admin.py
│ ├── apps.py
│ ├── cert
│ ├── filters.py
│ ├── __init__.py
│ ├── migrations
│ ├── migrations.tar.gz
│ ├── models
│ ├── __pycache__
│ ├── serializers.py
│ ├── tasks.py # 使用文件
│ ├── tests.py
│ ├── urls.py
│ └── views.py
需要在任务方法上面添加 @app.task 装饰器
# tasks.py # 使用文件
@app.task
def recived_data(user_id, up_down):
    """Send a clock-in reminder notice to the user with the given id.

    up_down selects the message: 1 = before work, 2 = almost late,
    anything else = end-of-day reminder.  Does nothing if no active
    (status=1) user matches user_id.
    """
    target = UserProfile.objects.filter(id=user_id, status=1).first()
    if not target:
        return
    reminders = {
        1: "快到上班时间了,别忘记打卡",
        2: "再不打卡就要迟到了",
    }
    info = reminders.get(up_down, "今天上班辛苦了,别忘记打卡")
    Notice5Tool(target).sendNotice(label=2, user=target, info=info)
3.启动和执行
1.启动work
celery -A application.celery:app worker -l info -P eventlet -E
> application.celery是celery文件所在的路径,app是celery对象的名称,
2.启动beat
celery -A application.celery:app beat -l INFO
3.问题
1.pool参数可配置solo,eventlet等,当--pool=solo时,多个任务是串行执行,效率低;--pool=eventlet时,多个任务是并发执行,效率高,其中用到了协程技术
2.当pool配置成eventlet,首先要安装eventlet,并且当Celery配置中的CELERY_RESULT_BACKEND = 'django-db'时,可能报"DatabaseError: DatabaseWrapper objects created in a thread can only be used in that same thread. The object with alias 'default' was created in thread id 140107533682432 and this is thread id 65391024"错误