import time
import traceback

import redis
from pymongo import MongoClient
from pytz import utc
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.jobstores.mongodb import MongoDBJobStore
from apscheduler.jobstores.memory import MemoryJobStore
from apscheduler.jobstores.redis import RedisJobStore
from apscheduler.executors.pool import ThreadPoolExecutor, ProcessPoolExecutor
from apscheduler.events import EVENT_JOB_MAX_INSTANCES, EVENT_JOB_ERROR, EVENT_JOB_MISSED
def cloudcc_job(command, id):
    """Scheduled job callback: print a timestamped start line and a
    timestamped completion line to stdout.

    Args:
        command: command text echoed in the start message.
        id: job identifier echoed in both messages.
    """
    # NOTE(review): presumably registered with the scheduler elsewhere in
    # the file; this body only logs — it does not execute `command`.
    start_detail = "id为%s, %s" % (id, command)
    print(time.strftime("%Y-%m-%d %H:%M:%S"), "[开始执行cloudcc_job]", start_detail)
    done_detail = "%s" % id
    print(time.strftime("%Y-%m-%d %H:%M:%S"), "[完成执行cloudcc_job]", done_detail)
class ApschedulerClass(object):
def __init__(self):
self.client = MongoClient(host='172.16.0.122', port=8088)
# self.pool = redis.ConnectionPool(host='172.16.0.125', port=6379)
jobstores = {
'default': MongoDBJobStore(collection='cloudcc_job', database='test', client=self.client),
# 'redis': RedisJobStore(connection_pool=self.pool),
# 'default': MemoryJobStore(),
}
executors = {
'default': ThreadPoolExecutor(20),
'processpool': ProcessPoolExecutor(10)
}
job_defaults = {
'coalesce': False,
'max_instances': 3,
# 'misfire_grace_time': 60