下面通过一个例子说明如何使用 Celery:前台发起请求后,由异步任务从 Salt_Grains 表获取数据并插入 Host 表。使用 Celery 封装异步处理非常简单:只要在调用任务函数时改用 .delay() 即可把它放入队列异步执行。注意 .delay(x) 中的参数是传给任务函数的实参,例如 host_create.delay(minion_list);它并不是"延时多少秒执行"的意思(定时/延时执行应使用 apply_async 的 countdown 参数)。
def sync_asset_to_host(request):
    """Split salt minions into "already in CMDB" vs "new", then dispatch
    async celery tasks to update the former and create the latter.

    :param request: Django HttpRequest; only GET is supported.
    :returns: HttpResponse with a JSON body listing both minion groups,
              or a 405 response for any other HTTP method.
    """
    if request.method != "GET":
        # The original fell through and implicitly returned None for
        # non-GET requests, which makes Django raise a ValueError.
        return HttpResponse(json.dumps({"error": "GET required"}),
                            status=405)

    new_minion_list = []
    old_minion_list = []
    for salt_host in Salt_grains.objects.all():
        minion = salt_host.minion_id
        # exists() issues a cheap EXISTS query instead of fetching whole
        # Host rows just to test membership.
        if Host.objects.filter(hostname=minion).exists():
            old_minion_list.append(minion)
        else:
            new_minion_list.append(minion)

    response_data = {
        "new_minion": new_minion_list,
        "old_minion": old_minion_list,
    }
    # The heavy DB work runs asynchronously in celery workers: .delay()
    # only enqueues the task and returns immediately, so the HTTP
    # response is not blocked.
    host_update.delay(old_minion_list)
    host_create.delay(new_minion_list)
    return HttpResponse(json.dumps(response_data))
在定义异步任务函数的文件顶部加入:from celery import task,然后用 @task() 装饰需要异步执行的函数。
@task()
def host_create(minion_list):
    """Celery task: create a Host record for each new minion.

    :param minion_list: list of minion_id strings that have no Host row yet.
    Side effects: inserts one Host row per minion found in Salt_grains.
    """
    # Local import so the task module needs no file-level change.
    import ast

    for minion_id in minion_list:
        grains_qs = Salt_grains.objects.filter(minion_id=minion_id)
        if not grains_qs:
            # No grains recorded for this minion; the original would have
            # raised IndexError on grains[0] here.
            continue

        # Parse the stored grains string ONCE. The original re-ran
        # eval() for every single field (13 times per minion), and
        # eval() would execute arbitrary code if a grains string were
        # ever tampered with; ast.literal_eval only accepts literals.
        grains = ast.literal_eval(grains_qs[0].grains)

        # NOTE: .decode('string-escape') is Python-2-only, matching the
        # rest of this (Python 2 / djcelery era) code base.
        kernel = grains.get('os').decode('string-escape')
        kernel_version = grains.get('osrelease').decode('string-escape')

        host = Host()
        host.hostname = grains.get('id').decode('string-escape')
        host.ip = grains.get('fqdn_ip4')[0].decode('string-escape')
        host.vender = grains.get('manufacturer').decode('string-escape')
        host.sn = grains.get('serialnumber').decode('string-escape')
        host.uuid = grains.get('uuid').decode('string-escape')
        host.cpu_model = grains.get('cpu_model').decode('string-escape')
        host.cpu_num = grains.get('num_cpus')
        host.disks = grains.get('disks')
        host.disk = grains.get('disk_total')
        host.memory = grains.get('mem_total')
        host.os = kernel
        host.kernel = kernel + kernel_version
        host.kernal_release = grains.get('kernelrelease').decode('string-escape')
        host.env_id = env_dispatch(host.ip)
        host.asset_type = Manufactory_dispatch(host.vender)
        host.group_id = 1    # default group; presumably reassigned later — TODO confirm
        host.project_id = 1  # default project; presumably reassigned later — TODO confirm
        host.save()
        # (The original also contained a dead no-op line
        # `host_cpu_model = host_cpu_model`, removed here.)
@task()
def host_update(minion_list):
    """Celery task: refresh existing Host rows from their salt grains.

    :param minion_list: list of minion_id strings already present in Host.
    Side effects: updates the matching Host row when its stored uuid
    matches the uuid reported by salt grains.
    """
    # Local import so the task module needs no file-level change.
    import ast

    for minion_id in minion_list:
        grains_qs = Salt_grains.objects.filter(minion_id=minion_id)
        host_qs = Host.objects.filter(hostname=minion_id)
        if not grains_qs or not host_qs:
            # Missing grains or host row; the original would IndexError.
            continue

        # Parse once; avoid repeated eval() of attacker-controllable text
        # (literal_eval only accepts Python literals).
        grains = ast.literal_eval(grains_qs[0].grains)
        host_uuid = grains.get('uuid').decode('string-escape')

        # BUG FIX: the original tested `host.values("uuid") == host_uuid`,
        # which compares a ValuesQuerySet against a string — never equal,
        # so the update silently never ran. Compare the stored uuid value.
        if host_qs[0].uuid != host_uuid:
            continue

        kernel = grains.get('os').decode('string-escape')
        host_ip = grains.get('fqdn_ip4')[0].decode('string-escape')
        host_qs.update(
            hostname=grains.get('id').decode('string-escape'),
            vender=grains.get('manufacturer').decode('string-escape'),
            ip=host_ip,
            sn=grains.get('serialnumber').decode('string-escape'),
            os=kernel,
            memory=grains.get('mem_total'),
            kernel=kernel + grains.get('osrelease').decode('string-escape'),
            kernal_release=grains.get('kernelrelease').decode('string-escape'),
            env_id=env_dispatch(host_ip),
            cpu_model=grains.get('cpu_model').decode('string-escape'),
            cpu_num=grains.get('num_cpus'),
            disk=grains.get('disk_total'),
            disks=grains.get('disks'),
        )
最后我们需要声明 Broker。注意 Celery 的架构中有两个独立概念:消息中间件 broker(负责传递任务消息)和结果后端 result backend(负责存储任务执行结果),两者可以分别配置。Broker 可以使用 RabbitMQ 或 Redis,由于 RabbitMQ 更成熟、管理界面更人性化,因此这里选择 RabbitMQ。
在Django的Settings里加入如下配置
# celery + rabbitmq
# Celery/djcelery configuration for the Django settings module.
# The original snippet used `platforms` and `djcelery` without importing
# them, so pasting it as-is raised NameError; the two imports are added.
import djcelery
from celery import platforms

platforms.C_FORCE_ROOT = True  # allow running a worker as root (dev only)
djcelery.setup_loader()        # let djcelery discover tasks via Django

# RabbitMQ broker connection (defaults of a stock RabbitMQ install).
BROKER_HOST = "127.0.0.1"
BROKER_PORT = 5672
BROKER_USER = "guest"      # NOTE: guest/guest only works from localhost
BROKER_PASSWORD = "guest"
BROKER_VHOST = "/"
下面启动 worker:python manage.py celery worker --loglevel=debug -c 1 &
其中 -c 1 表示并发 worker 进程数为 1,可根据服务器负载调大以提高吞吐;命令末尾的 & 表示放入后台运行,别忘了加上。
usr/lib/python2.7/site-packages/djcelery/loaders.py:130: UserWarning: Using settings.DEBUG leads to a memory leak, never use this setting in production environments!
warn('Using settings.DEBUG leads to a memory leak, never '
[2017-07-31 21:25:02,721: WARNING/MainProcess] /usr/lib/python2.7/site-packages/djcelery/loaders.py:130: UserWarning: Using settings.DEBUG leads to a memory leak, never use this setting in production environments!
warn('Using settings.DEBUG leads to a memory leak, never '
这个警告在开发阶段开启 DEBUG 时可以忽略;生产环境务必将 settings.DEBUG 设为 False,否则会造成内存泄漏。
[2017-07-31 21:25:02,722: WARNING/MainProcess] celery@PyDev ready.
[2017-07-31 21:25:02,722: DEBUG/MainProcess] | Worker: Hub.register Pool...
[2017-07-31 21:25:02,724: DEBUG/MainProcess] basic.qos: prefetch_count->4
看到这里说明已经启动成功