1
|
pip install celery
|
1
2
3
4
|
tar xvfz celery-0.0.0.tar.gz
cd celery-0.0.0
python setup.py build
python setup.py install # as root
|
1
|
yum -y install rabbitmq-server
|
1
2
3
4
|
$ tar xvzf rabbitmq-server-2.6.1.tar.gz
$ cd rabbitmq-server-2.6.1
$ make
# TARGET_DIR=/usr/local SBIN_DIR=/usr/local/sbin MAN_DIR=/usr/local/man make install
|
1
|
rabbitmqctl rabbitmq-env rabbitmq-server
|
1
2
3
4
5
6
|
运行
找到sbin/目录,运行程序:
/usr/local/sbin/rabbitmq-server --detached
停止程序:
/usr/local/sbin/rabbitmqctl stop
rabbitmqctl rabbitmq-env rabbitmq-server
|
1
2
3
4
5
6
7
8
9
10
11
|
#coding:utf-8
"""Celery configuration module (celeryconfig.py) for a RabbitMQ/AMQP broker.

Loaded by the worker via CELERY_CONFIG_MODULE or the default lookup.
"""
import sys
import os

# Make the current working directory importable so CELERY_IMPORTS
# can resolve the local "tasks" module regardless of where the
# worker is started from.
sys.path.insert(0, os.getcwd())

# Modules the worker imports at startup to register their tasks.
CELERY_IMPORTS = ("tasks", )
# Store task results in the AMQP broker itself.
CELERY_RESULT_BACKEND = "amqp"

# RabbitMQ connection settings (stock defaults for a local install).
BROKER_HOST = "localhost"
BROKER_PORT = 5672
BROKER_USER = "guest"
BROKER_PASSWORD = "guest"
BROKER_VHOST = "/"
|
1
2
3
4
5
6
|
from celery.task import task
import time


@task()
def add(x, y):
    """Return x + y after sleeping 5 seconds (simulates a slow job).

    The sleep makes the async behaviour observable: a.ready() is False
    while the worker is still running the task.
    """
    time.sleep(5)
    return x + y
|
1
2
3
4
5
6
7
8
9
10
11
|
In [1]: from main.tasks import add
In [2]: a = add.delay(1, 1)
In [3]: a.ready()  # worker未开启
Out[3]: False
In [4]: a = add.delay(1, 1)  # 开启worker,重新执行
In [5]: a.ready()
Out[5]: True
In [9]: a.get()  # Waits until the task is done and returns the retval.
Out[9]: 2
In [10]: a.successful()
Out[10]: True
|
1
2
3
4
5
6
7
|
# Configure serialization and timezone handling on the Celery app.
# NOTE(review): assumes `app` is a Celery() instance created earlier
# in this file — confirm against the full source.
app.conf.update(
    CELERY_TASK_SERIALIZER='json',
    CELERY_ACCEPT_CONTENT=['json'],  # Ignore other content
    CELERY_RESULT_SERIALIZER='json',
    CELERY_TIMEZONE='Europe/Oslo',
    CELERY_ENABLE_UTC=True,
)
|
1
2
3
4
5
6
7
8
9
10
|
redis的方法
# tasks.py -- Celery task module using a Redis broker.
import time
from celery import Celery
import os

celery = Celery('tasks', broker='redis://localhost:6379/0')


@celery.task
def osrun(good):
    """Run *good* as a shell command and return its captured stdout.

    SECURITY WARNING: os.popen() hands *good* to the shell unescaped.
    Never expose this task to untrusted input (shell-injection risk);
    prefer subprocess.run([...], shell=False) with an argument list.
    """
    reok = os.popen(good).read()
    return reok
|
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
|
from celery import Celery

# Prefer a top-level context_init; fall back to the package-local one
# when the former is not on sys.path.
try:
    import context_init
except ImportError:
    from queue import context_init

import settings
from test import *  # NOTE(review): wildcard import -- prefer explicit names.

BROKER = 'redis://127.0.0.1:6379/1'
celery = Celery('tasks', broker=BROKER)


@celery.task
def test():
    """Return 1 + 1 (trivial connectivity-check task)."""
    result = 1 + 1
    return result


@celery.task
def test2(a, b):
    """Return the sum of *a* and *b*."""
    result = a + b
    return result
|
1
|
celery -A tasks worker --loglevel=info
|
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
|
# /etc/default/celeryd -- settings for the generic celeryd init script.

# Name of nodes to start, here we have a single node
CELERYD_NODES="w1"
# or we could have three nodes:
#CELERYD_NODES="w1 w2 w3"

# Where to chdir at start. (CATMAID Django project dir.)
CELERYD_CHDIR="/path/to/CATMAID/django/projects/mysite/"

# Python interpreter from environment. (in CATMAID Django dir)
ENV_PYTHON="/path/to/CATMAID/django/env/bin/python"

# How to call "manage.py celeryd_multi"
CELERYD_MULTI="$ENV_PYTHON $CELERYD_CHDIR/manage.py celeryd_multi"

# How to call "manage.py celeryctl"
CELERYCTL="$ENV_PYTHON $CELERYD_CHDIR/manage.py celeryctl"

# Extra arguments to celeryd
CELERYD_OPTS="--time-limit=300 --concurrency=1"

# Name of the celery config module.
CELERY_CONFIG_MODULE="celeryconfig"

# %n will be replaced with the nodename.
CELERYD_LOG_FILE="/var/log/celery/%n.log"
CELERYD_PID_FILE="/var/run/celery/%n.pid"

# Workers should run as an unprivileged user.
CELERYD_USER="celery"
CELERYD_GROUP="celery"

# Name of the projects settings module.
export DJANGO_SETTINGS_MODULE="settings"
|
1
|
adduser --system --no-create-home --disabled-login --disabled-password --group celery
|