# Notes on dynamically adding/removing/updating APScheduler jobs from Kafka data read via Faust. Typed from memory, untested! (记一次通过faust读取kafka数据动态增、删、改APScheduler任务的笔记)
# -*- coding: utf-8 -*-
import asyncio
import logging
import faust
import ujson
from apscheduler.executors.pool import ThreadPoolExecutor
from apscheduler.jobstores.mongodb import MongoDBJobStore
from apscheduler.schedulers.background import BackgroundScheduler
from pymongo import MongoClient
# Module-level logger for this script.
logger = logging.getLogger(__name__)

# Default root-logger configuration; bump apscheduler's logger to DEBUG
# so every job add/remove/modify and execution is traced on the console.
logging.basicConfig()
logging.getLogger('apscheduler').setLevel(logging.DEBUG)
# MongoDB is used as the persistent job store so scheduled jobs survive
# process restarts. NOTE(review): connection details are hard-coded to a
# local instance — confirm before deploying elsewhere.
_mongo_client = MongoClient("127.0.0.1", 27017)
mongo_jobstore = MongoDBJobStore(
    client=_mongo_client,
    database="apscheduler_job",
    collection="job",
)
init_scheduler_options = {
"jobstores": {
"default": mongo_jobstore # 默认持久化存储器
},
"executors": {
'default': ThreadPoolExecutor(20) # 20个线程的线程池
},
"job_defaults": {
'coalesce':