Getting Started with Celery in Django
1. Choosing a Broker for Celery
Here I use Redis as Celery's broker ("middleman"). You could also use Django itself, MongoDB, and so on. Think of the broker as the data backbone of a Celery deployment: it stores the task queue, execution records, and the like. For installing Redis, refer to the guides on installing Redis on CentOS and Windows.
Install Redis:
brew install redis
Then install celery-with-redis:
pip install celery-with-redis
This command automatically pulls in redis (the Python library), celery, kombu, billiard, amqp, vine, and celery-with-redis itself. Note that the redis installed by pip is the Python client library for Redis, not the Redis database; the Redis server must be installed separately.
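To confirm that the Redis server itself (not just the client library) is reachable, here is a quick check from a Python shell, assuming the default localhost:6379:

import redis

# Ping the local Redis server; raises ConnectionError if it isn't running.
conn = redis.Redis(host="localhost", port=6379, db=0)
print(conn.ping())  # True when the server is up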
2. Wiring Celery into Django
Open the folder containing settings.py and create a new file named celery.py with the following code:
from __future__ import absolute_import, unicode_literals

from os import path, environ

from celery import Celery
from django.conf import settings

# Derive the project name from the directory containing this file, e.g. okr_manage
project_name = path.split(path.dirname(__file__))[-1]
project_settings = "{}.settings".format(project_name)

# Set the default Django settings module for the celery program
environ.setdefault("DJANGO_SETTINGS_MODULE", project_settings)

# Instantiate the Celery application
app = Celery(project_name)

# Configure Celery from Django's settings; with namespace='CELERY',
# only settings keys prefixed with CELERY_ are picked up
app.config_from_object("django.conf:settings", namespace='CELERY')

# Auto-discover tasks.py modules in every installed app
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)


@app.task(bind=True)
def debug_task(self):
    print("Request: {0!r}".format(self.request))
You also need to configure Celery in settings.py, above all the broker; without a broker setting, Celery fails with a cannot-connect-to-broker error. Since celery.py above uses namespace='CELERY', every key must carry the CELERY_ prefix (the old BROKER_URL name would be silently ignored). Add the following to settings.py:
# Celery settings
# Broker URL: redis://<redis host>:<port>/<db number>
# With namespace='CELERY' in celery.py, the key must be CELERY_BROKER_URL
CELERY_BROKER_URL = "redis://localhost:6379/0"
# Result backend, used to track task results
CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
# Serialization formats for task messages and results
CELERY_ACCEPT_CONTENT = ['application/json', ]
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
# Use the same timezone as Django's TIME_ZONE setting
CELERY_TIMEZONE = TIME_ZONE
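For reference, the broker URL can also carry a password and an explicit database number; the host and password below are placeholders, not part of the original setup:

# Illustrative broker URL variants (placeholder host/password):
# CELERY_BROKER_URL = "redis://localhost:6379/0"         # no auth, database 0
# CELERY_BROKER_URL = "redis://:secret@10.0.0.5:6379/1"  # password-protected server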
3. Handing slow tasks to Celery (create a tasks.py file inside the app folder)
from __future__ import absolute_import, unicode_literals

from time import sleep

from celery import shared_task


@shared_task
def send(email):
    print("start send email to %s" % email)
    sleep(5)  # simulate a slow operation (5 seconds)
    print("success")
    return True
Then simply call the task from a view:
# coding:utf-8
import json

from django.http import HttpResponse

from .models import Blog
from .tasks import send  # the task defined in tasks.py


def home(request):
    # Slow work (sending the email) is queued with .delay() and runs in the worker
    send.delay("test@test.com")
    # The request continues immediately with other work
    data = list(Blog.objects.values('caption'))
    return HttpResponse(json.dumps(data), content_type='application/json')
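Since a result backend is configured, the AsyncResult handle returned by delay() can be used to check on the task later. A quick sketch (the email address is a placeholder):

from .tasks import send

result = send.delay("test@test.com")
print(result.id)       # the task id; can be stored and looked up later
print(result.ready())  # False while the worker is still executing the task
# result.get(timeout=10)  # blocks until finished; returns True from send()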
4. Starting and testing Celery locally
Before starting Celery, make sure Redis is installed and its server is running. Then start a Celery worker (the command is lowercase celery; myproject is your project package name):
celery -A myproject worker -l info
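Once the worker is up, you can also verify that it registered your tasks with Celery's built-in inspect subcommand:
celery -A myproject inspect registered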
Note:
You may hit the following frustrating problem; the fix is recorded here.
celery -A okr_manage worker -l info does nothing at all.
The cause is that the async module was renamed (async became a reserved keyword in Python 3.7). The developers have already handled this issue and merged the fix into master, so the quick solution is to install celery straight from GitHub:
pip install --upgrade https://github.com/celery/celery/tarball/master
5. Capturing Celery logs in Django
Set up the log handlers in settings.py. Note that mk_log_path is a helper from the post's own project; a sketch of it follows the config below.
LOGGING = {
    "version": 1,
    "disable_existing_loggers": True,
    "formatters": {
        "standard": {
            "format": "%(asctime)s [%(threadName)s:%(thread)d] [%(name)s:%(lineno)d] [%(module)s:%(funcName)s] "
                      "[%(levelname)s]- %(message)s"
        }
    },
    "filters": {},
    "handlers": {
        "mail_admins": {
            "level": "ERROR",
            "class": "django.utils.log.AdminEmailHandler",
            "include_html": True,
        },
        "default": {
            "level": "DEBUG",
            "class": "logging.handlers.RotatingFileHandler",
            "filename": mk_log_path("all.log"),
            "maxBytes": 1024 * 1024 * 5,
            "backupCount": 5,
            "formatter": "standard",
        },
        "error": {
            "level": "ERROR",
            "class": "logging.handlers.RotatingFileHandler",
            "filename": mk_log_path("error.log"),
            "maxBytes": 1024 * 1024 * 5,
            "backupCount": 5,
            "formatter": "standard",
        },
        "console": {
            "level": "INFO",
            "class": "logging.StreamHandler",
            "formatter": "standard",
        },
"request_handler": {
"level": "DEBUG",
"class": "logging.handlers.RotatingFileHandler",
"filename": mk_log_path("script.log"),
"maxBytes": 1024 * 1024 * 5,
"backupCount": 5,
"formatter": "standard",
},
"script_handler": {
"level": "DEBUG",
"class": "logging.handlers.RotatingFileHandler",
"filename": mk_log_path("script.log"),
"maxBytes": 1024 * 1024 * 5,
"backupCount": 5,
"formatter": "standard",
},
"celery_logger" {
"level": "DEBUG",
"filters": None,
"class": "logging.handlers.RotatingFileHandler",
"filename": mk_log_path("celery.log"),
"maxBytes": 1024 * 1024 * 5,
"backupCount": 2,
"formatter": "standard"
},
"celery_task_logger": {
"level": "DEBUG",
"filters": None,
"class": "logging.handlers.RotatingFileHandler",
"filename": mk_log_path("celery_tasks.log"),
"maxBytes": 1024 * 1024 * 5,
"backupCount": 2,
"formatter": "standard"
},
},
"loggers": {
"django": {
"handlers": ["default", "console"],
"level": "DEBUG",
"propagate": False,
},
"django.request": {
"handlers": ["request_handler"],
"level": "DEBUG",
"propagate": False,
},
"scripts": {
"handlers": ["script_handler"],
"level": "INFO",
"propagate": False,
},
# 下面是要用到的py文件.
"okr.views": {
"handlers": ["default", "error", "console", "request_handler", "script_handler"],
"level": "INFO",
"propagate": True,
},
"tool_site.views": {
"handlers": ["default", "error", "console", "request_handler", "script_handler"],
"level": "INFO",
"propagate": True,
},
"okr.models": {
"handlers": ["default", "error", "console", "request_handler", "script_handler"],
"level": "INFO",
"propagate": True,
},
"okr.okr_middleware": {
"handlers": ["default", "error", "console", "request_handler", "script_handler"],
"level": "INFO",
"propagate": True,
},
"handler.personal": {
"handlers": ["default", "error", "console", "request_handler", "script_handler"],
"level": "INFO",
"propagate": True,
},
"handler.tpo_tools": {
"handlers": ["default", "error", "console", "request_handler", "script_handler"],
"level": "INFO",
"propagate": True,
},
"handler.auto_view": {
"handlers": ["default", "error", "console", "request_handler", "script_handler"],
"level": "INFO",
"propagate": True,
},
"mock.views": {
"handlers": ["default", "error", "console", "request_handler", "script_handler"],
"level": "INFO",
"propagate": True,
},
"tools_site.views": {
"handlers": ["default", "error", "console", "request_handler", "script_handler"],
"level": "INFO",
"propagate": True,
},
"tools.utils": {
"handlers": ["default", "error", "console", "request_handler", "script_handler"],
"level": "INFO",
"propagate": True,
},
"tool_site.tasks": {
"handlers": ["celery_task_logger"],
"level": "INFO",
"propagate": True,
},
"celery": {
"handlers": ["celery_logger"],
"level": "INFO",
"propagate": True,
},
},
}
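mk_log_path is the post's own helper and its definition isn't shown; here is a minimal sketch of what such a helper might look like. It would sit above LOGGING in settings.py, and BASE_DIR is assumed to be Django's usual project-root setting:

import os

LOG_DIR = os.path.join(BASE_DIR, "logs")  # assumes the standard BASE_DIR setting

def mk_log_path(filename):
    """Return an absolute path under LOG_DIR, creating the directory if needed."""
    if not os.path.exists(LOG_DIR):
        os.makedirs(LOG_DIR)
    return os.path.join(LOG_DIR, filename)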
Then use Celery's task logger in tasks.py:
from __future__ import absolute_import, unicode_literals

from celery import shared_task
from celery.utils.log import get_task_logger

# get_task_logger names the logger after the module, e.g. tool_site.tasks
logger = get_task_logger(__name__)


@shared_task
def send(username, uri, query_data, count=100):
    try:
        # ... the actual push logic goes here ...
        logger.info("push success")
        return "push success"
    except Exception as e:
        logger.exception(e)
        return "push failed"
6. Using multiprocessing in Celery
Inside a Celery task you cannot use from multiprocessing import Process: the prefork worker's child processes are daemonic, and the standard library refuses to let a daemonic process spawn children. Celery already ships its own fork of multiprocessing, billiard, which lifts this restriction; use from billiard.context import Process instead:
from __future__ import absolute_import, unicode_literals

from time import sleep

import requests
from billiard.context import Process
from celery import shared_task
from celery.utils.log import get_task_logger

from tool_site.models import PushInfo

logger = get_task_logger(__name__)


def push(url, pk):
    # POST the payload and record how long the request took
    resp = requests.post(url)
    PushInfo.objects.write_info(pk=pk, second=resp.elapsed.total_seconds(),
                                code=resp.status_code)


@shared_task
def send(p_id, uri, query_data, count=100):
    try:
        PushInfo.objects.start_push(pk=p_id)
        for i, v in enumerate(query_data):
            url = "{}?json={}".format(uri, v)
            # run each push in a billiard subprocess
            p = Process(target=push, args=(url, p_id))
            p.start()
            p.join()
            # pause for a second after every `count` pushes
            if not (i + 1) % int(count):
                logger.info("{} pushed, sleep 1s".format(i + 1))
                sleep(1)
        PushInfo.objects.finish(pk=p_id)
        logger.info("push success")
        return "push success"
    except Exception as e:
        logger.exception(e)
        return "push failed"