flask_apscheduler 自定义model及任务的增删改查

这是一个练手的demo,注释部分未进行去除,主要借鉴的是方法,希望看到的朋友有所收获。

项目结构如图:
(此处原有项目结构截图,图片未能显示)
api/__init__.py

from task_demo.api.config import Config

api.config.py

from apscheduler.executors.pool import ThreadPoolExecutor
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore


class Config(object):
    """Flask-APScheduler settings: the static job list, the persistent
    job store, the executor pool and the scheduler defaults."""

    # Jobs registered at start-up; 'func' is an import path "module:callable".
    JOBS = [
        {
            'id': 'job1',
            'func': 'task_demo.main:job1',
            'args': (1, 2),
            'trigger': 'interval',
            'seconds': 30,
            'replace_existing': True,
        },
        {
            'id': 'job2',
            'func': 'task_demo.main:job2',
            'trigger': 'interval',
            'seconds': 30,
            'replace_existing': True,
        },
        {
            'id': 'job3',
            'func': 'task_demo.main:job3',
            'args': (),
            'trigger': 'cron',
            'second': 30,
            'replace_existing': True,
        },
    ]

    # Persist jobs in MySQL (table 'myjobs') so they survive restarts.
    SCHEDULER_JOBSTORES = {
        'default': SQLAlchemyJobStore(url='mysql+pymysql://root:123456@192.168.101.2:3306/scheduler?charset=utf8', tablename='myjobs'),
    }

    # Run jobs on a 20-worker thread pool.
    SCHEDULER_EXECUTORS = {
        'default': ThreadPoolExecutor(20),
    }

    SCHEDULER_JOB_DEFAULTS = {
        'coalesce': False,
        'max_instances': 5,
    }

    SCHEDULER_TIMEZONE = 'Asia/Shanghai'
    SCHEDULER_API_ENABLED = True  # expose flask_apscheduler's built-in REST API


task_demo/__init__.py

import logging

from apscheduler.events import EVENT_JOB_ERROR, EVENT_JOB_MISSED, EVENT_JOB_EXECUTED
# from apscheduler.schedulers.background import BackgroundScheduler
from flask import Flask
# from flask_apscheduler import APScheduler
from flask_migrate import Migrate

from task_demo.db import db
from task_demo.load_job import load_job
from task_demo.main import job_listener
from task_demo.apidemo import bp as apidemo_url
# scheduler = APScheduler(BackgroundScheduler(timezone="Asia/Shanghai"))
from task_demo.scheduler import scheduler


def create_app(test_config=None):
    """Application factory: build the Flask app, wire up the database and
    blueprints, configure the APScheduler instance, and start it.

    :param test_config: optional mapping of settings that overrides the
        on-disk ``config.py`` (used by tests).
    :return: the configured :class:`flask.Flask` application.
    """
    app = Flask(__name__)
    if test_config is None:
        # Normal run: load settings from the project's config.py.
        app.config.from_pyfile("config.py")
    else:
        # Test run: take settings straight from the caller.
        app.config.update(test_config)

    # Database wiring.  NOTE(review): credentials are hard-coded and the
    # host differs from the job-store URL in api/config.py — confirm intended.
    app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://root:123456@10.123.5.23:3306/scheduler?charset=utf8'
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
    app.config['SQLALCHEMY_BINDS'] = {
        'config': 'mysql+pymysql://root:123456@127.0.0.1:3306/scheduler?charset=utf8',
    }
    app.config["SQLALCHEMY_ECHO"] = True
    db.init_app(app)
    db.app = app
    Migrate(app, db)

    app.register_blueprint(apidemo_url)

    # Prune JOBS entries already persisted in the job store, then hand the
    # resulting config to the scheduler.
    load_job(app)
    scheduler.init_app(app)

    # Log executed / missed / failed job events.
    scheduler.add_listener(job_listener, EVENT_JOB_ERROR | EVENT_JOB_MISSED | EVENT_JOB_EXECUTED)

    # Bug fix: the original assigned the logging *module* as the scheduler's
    # logger; give it a real Logger instance instead.
    scheduler._logger = logging.getLogger('flask_apscheduler')
    scheduler.start()
    return app

apidemo.py

```python
import json

from flask import Blueprint, Response, request, jsonify

from task_demo import scheduler
from task_demo.models import MyJobs

bp = Blueprint("charge_job", __name__, url_prefix='/task/')

# Bug fix: this module previously created a *second* SQLAlchemy() instance
# that was never bound to the Flask app, leaving db.session unusable in
# query_prefency().  Reuse the application's single instance instead.
from task_demo.db import db


@bp.route('/add_job', methods=['POST', ])
def add_job():
    """Add an interval job from a JSON body.

    Bug fix: the original view declared ``(job_id, func, args, seconds)``
    as parameters, but the URL rule has no converters, so Flask would call
    the view with no arguments and raise TypeError on every request.  The
    values are now read from the request's JSON payload:
    ``{"id": ..., "func": "module:callable", "args": [...], "seconds": N}``.
    """
    payload = request.json or {}
    job_id = payload.get('id')
    func = payload.get('func')
    args = tuple(payload.get('args', ()))
    seconds = payload.get('seconds')
    print(f"添加job - {job_id}")
    scheduler.add_job(id=job_id, func=func, args=args, trigger='interval', seconds=seconds)
    return Response(json.dumps(f"添加job - {job_id}"))


@bp.route("/update_job/<string:job_id>", methods=["PATCH"])
def update_job(job_id):
    print(job_id, '+')
    payload = request.json
    job_name = payload.get('name')
    trigger = payload.get('trigger')
    trigger_type = payload.get('trigger_type')
    second = trigger.get('second', '*')

    if trigger_type == 'cron':

        job = scheduler.modify_job(job_id,
                                   trigger='cron',
                                   name=job_name,
                                   func=payload.get('func'),
                                   second=second,
                                   args=tuple(payload.get('args', ())),
                                   kwargs=payload.get('kwargs', {})
                                   )
        return jsonify({
            'jobs': [
                {
                    'id': job.id,
                    'name': job.name,
                    'func': job.func_ref,
                }
            ]
        })
    elif trigger_type == "interval":
        job = scheduler.modify_job(job_id,
                                   trigger='interval',
                                   name=job_name,
                                   func=payload.get('func'),
                                   seconds=second,
                                   args=tuple(payload.get('args', ())),
                                   kwargs=payload.get('kwargs', {})
                                   )
        return jsonify({
            'jobs': [
                {
                    'id': job.id,
                    'name': job.name,
                    'func': job.func_ref,
                }
            ]
        })

    return jsonify({})


@bp.route('/remove_job/<string:job_id>/', methods=['POST', ])
def remove_job(job_id):
    """Delete the job identified by *job_id* from the scheduler."""
    scheduler.remove_job(job_id)
    message = f"移除job - {job_id}"
    print(message)
    return Response(json.dumps(message))


@bp.route('/pause_job/<string:job_id>/', methods=['POST', ])
def pause_job(job_id):
    """Pause the job identified by *job_id* (it stays in the store)."""
    scheduler.pause_job(job_id)
    message = f"停止job - {job_id}"
    print(message)
    return Response(json.dumps(message))


@bp.route('/resume_job/<string:job_id>/', methods=['POST', ])
def resume_job(job_id):
    """Resume a previously paused job."""
    scheduler.resume_job(job_id)
    print(f"恢复job - {job_id}")
    # Bug fix: the response previously said "停止job" (pause) here,
    # copy-pasted from pause_job; report "恢复job" (resume) instead.
    return Response(json.dumps(f"恢复job - {job_id}"))


@bp.route('/shutdown', methods=['GET', ])
def shutdown():
    """Shut down the scheduler without waiting for running jobs to finish."""
    scheduler.shutdown(wait=False)
    # Plain literal: the original f-string had no placeholders.
    return Response(json.dumps("关闭调度器"))


@bp.route('/', methods=['GET', ])
def get_jobs():
    """List every scheduled job (including paused ones) as JSON."""
    jobs = scheduler.get_jobs()
    print(f"所有job - {jobs}")
    if not jobs:
        return jsonify(f"所有job - {jobs}")
    return jsonify({
        'jobs': [
            {
                'id': job.id,
                'name': job.name,
                'func': job.func_ref,
                # Job ids are unique, so the original O(n^2) rescan of the
                # whole list for t.id == job.id always matched the job
                # itself; read its trigger directly.
                'trigger': {job.name: str(job.trigger)},
                'args': job.args,
                'kwargs': job.kwargs,
            } for job in jobs
        ]
    })


@bp.route('/query_prefency/', methods=['GET'])
def query_prefency():
    """Return the ids of all persisted jobs from the myjobs table."""
    jobs = db.session.query(MyJobs).all()
    data = [{'name': job.id} for job in jobs]
    # Bug fix: jsonify(json.dumps(data)) double-encoded the payload and
    # returned a JSON *string*; jsonify(data) returns a JSON array.
    return jsonify(data)

db.py

from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()

load_job.py

import importlib


def load_job(app):
    """Import the API package's Config class, prune jobs that are already
    persisted from its JOBS list, and apply it to the app configuration."""
    api_module = importlib.import_module("task_demo.api")
    config_cls = getattr(api_module, 'Config')
    update_job(config_cls)
    app.config.from_object(config_cls())


def update_job(config):
    """Drop from ``config.JOBS`` every job whose id already exists in the
    ``myjobs`` table, so persisted jobs are not registered a second time.

    Best effort: if the models module cannot be imported or the database
    is unreachable, the job list is left untouched.
    """
    try:
        from task_demo.models import MyJobs
        # Ids from config.JOBS with no matching row in the job-store table.
        missing_ids = [
            job_id
            for job_id in (job.get('id') for job in config.JOBS)
            if MyJobs.query.filter(MyJobs.id == job_id).first() is None
        ]
        config.JOBS = [job for job in config.JOBS if str(job.get('id')) in missing_ids]
    except Exception:
        # Narrowed from a bare except; deliberately swallow so that a
        # missing table / DB simply means "register everything".
        pass

main.py

import datetime
import pickle
import time
# from task_demo import scheduler
# from task_demo.apidemo import bp as apidemo_url
from apscheduler.events import EVENT_JOB_ERROR, EVENT_JOB_MISSED, EVENT_JOB_EXECUTED

# from task_demo import scheduler
from task_demo.scheduler import scheduler
from task_demo.load_job import load_job
import logging

# Module-level logger for job output; everything is appended to mylog.txt.
logger = logging.getLogger('job')
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S',
                    filename='mylog.txt',
                    filemode='a')


# Connection parameters for the commented-out RedisJobStore in api/config.py.
# NOTE(review): port 63379 looks like a typo for the default Redis port 6379 — confirm.
connect_args = {
    'host': '127.0.0.1',
    'port': 63379,
    # 'password': '123456'
}


def job1(a, b):
    """Demo interval job: print both arguments separated by a space."""
    print(f"{a} {b}")


def job2():
    """Demo interval job: print a fixed greeting."""
    message = 'Hello world!'
    print(message)


def job3():
    """Demo cron job: print a fixed marker line."""
    text = 'this is a test!!!'
    print(text)


def print_time(name):
    """Print *name* together with the current wall-clock time."""
    now = time.ctime()
    print(f'{name} - {now}')


def job_listener(Event):
    """Scheduler event hook: log success or failure of the job that fired.

    Registered in create_app() for EVENT_JOB_EXECUTED, EVENT_JOB_MISSED
    and EVENT_JOB_ERROR events.
    """
    job = scheduler.get_job(Event.job_id)
    if Event.exception:
        # Failure path: record code, exception and traceback from the event.
        print("任务出错了!!!!!")
        logger.error("jobname=%s|jobtrigger=%s|errcode=%s|exception=[%s]|traceback=[%s]|scheduled_time=%s", job.name,
                     job.trigger, Event.code, Event.exception, Event.traceback, Event.scheduled_run_time)
    else:
        # Success path: record trigger, scheduled time and return value.
        print('任务正常运行!')
        logger.info('wowowowowowo')
        logger.info("jobname=%s|jobtrigger=%s|jobtime=%s|retval=%s", job.name, job.trigger, Event.scheduled_run_time,
                    Event.retval)


def test_job(x):
    print(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'), x)
    print(1 / 0)


def my_job(x):
    """Demo recurring job: print a greeting and the current timestamp."""
    print('hello this here ')
    now = datetime.datetime.now()
    print(now.strftime('%Y-%m-%d %H:%M:%S'), x)


# if __name__ == '__main__':
#     app = create_app()
#     app.register_blueprint(apidemo_url)
#     load_job(app)
#     scheduler.init_app(app)
#     # scheduler.add_job(func=test_job, args=('一次性任务,会出错',),
#     #                   next_run_time=datetime.datetime.now() + datetime.timedelta(seconds=15), id='date_task3666666',
#     #                   replace_existing=True)
#     # scheduler.add_job(func=my_job, args=('循环任务',), trigger='interval', seconds=10, id='interval_task',replace_existing=True)
#
#     scheduler.add_listener(job_listener, EVENT_JOB_ERROR | EVENT_JOB_MISSED | EVENT_JOB_EXECUTED) #添加监听事件
#
#     scheduler._logger = logging
#     scheduler.start()
#     app.run(port=9999, host='0.0.0.0')


models.py

from task_demo import db


class MyJobs(db.Model):
    """ORM mapping over the APScheduler job-store table ('myjobs', the
    tablename configured for SQLAlchemyJobStore in api/config.py), used by
    load_job.update_job to check which job ids are already persisted."""
    __tablename__ = 'myjobs'
    # Job id as configured in Config.JOBS (e.g. 'job1').
    id = db.Column('id', db.String(200), primary_key=True)
    # Next scheduled run time; presumably a POSIX timestamp written by
    # SQLAlchemyJobStore — confirm against apscheduler's schema.
    next_run_time = db.Column('next_run_time', db.Float, index=True)
    # Serialized job state blob written by APScheduler.
    job_state = db.Column('job_state', db.LargeBinary, nullable=False)

redis_help.py

import redis

# Shared Redis client (db 14) for the optional RedisJobStore experiments.
redis_helper = redis.Redis(host='127.0.0.1', port=6379, db=14)


if __name__ == '__main__':
    # Only probe the server when run directly; the original printed
    # redis_helper.keys() at import time, connecting to Redis as a
    # module-import side effect.
    print(redis_helper.keys())

scheduler.py

from apscheduler.schedulers.background import BackgroundScheduler
from flask_apscheduler import APScheduler

# Single shared scheduler instance for the whole project, wrapping a
# BackgroundScheduler pinned to Asia/Shanghai; bound to the app and
# started in create_app().
scheduler = APScheduler(BackgroundScheduler(timezone="Asia/Shanghai"))

成功运行的内容如下图
(此处原有运行结果截图,图片未能显示)
pycharm 运行配置:
(此处原有 PyCharm 运行配置截图,图片未能显示)

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值