Combining Python decorators with try-catch

Python syntactic sugar: a try-catch decorator

#!/usr/bin/env python
# encoding: utf-8
import functools
import sys

from loguru import logger

from utils.alarm_tool import Alarm


def exception_wrapper(func):
    @functools.wraps(func)
    def inner(*args, **kwargs):
        exp_msg = ''
        ret = None
        try:
            try:
                # Build a "file:line,f:function" location string for the alarm text.
                func_name = func.__name__
                file_name = func.__code__.co_filename
                line_num = func.__code__.co_firstlineno
                exp_msg = '{}:{},f:{}'.format(
                    file_name, line_num, func_name
                )
            except Exception:
                # Introspection is best-effort; never let it mask the real call.
                pass
            ret = func(*args, **kwargs)
        except Exception as e:
            # Log the full traceback and page the on-call via the Alarm module.
            _, exc_value, _ = sys.exc_info()
            logger.exception(e)
            Alarm.msg('exception, {}, e:{}'.format(exp_msg, exc_value))
        return ret
    return inner
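
A minimal usage sketch (parse_page and its failing body are hypothetical, for illustration only): the decorated call returns None instead of raising, while the traceback goes to the log and an alarm is sent.

@exception_wrapper
def parse_page(page_id):
    # Hypothetical worker that always fails.
    return 1 / 0

result = parse_page(42)  # logs the traceback and fires Alarm.msg
print(result)            # None: the wrapper swallows the exception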

The Alarm alerting module

import requests
from loguru import logger


class Alarm(object):
    @classmethod
    def msg(cls, txt):
        """Send a text alert; returns True on success."""
        ret = False
        try:
            url = "xxx"
            payload = {
                "tousers": ["lijinze1"],
                "msgtype": "text",
                "text": {
                    "content": txt
                }
            }
            requests.request("POST", url, json=payload, timeout=10)
            ret = True
        except Exception as e:
            # The alarm itself must never raise; swallow and log.
            logger.exception(e)
        return ret

    @classmethod
    def call(cls, txt):
        """Place a voice-call alert; returns True on success."""
        ret = False
        try:
            url = "xxx"
            payload = {
                "tousers": ["lijinze1"],
                "msgtype": "voice",
                "voice": {
                    "message": txt
                }
            }
            requests.request("POST", url, json=payload, timeout=10)
            ret = True
        except Exception as e:
            logger.exception(e)
        return ret
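
If the placeholder "xxx" URLs are replaced with a real webhook endpoint, the two channels can be chained, falling back to a voice call when the text message fails to send; the alert text here is made up.

if not Alarm.msg('crawler heartbeat missing for 5 minutes'):
    # Text delivery failed; escalate to a voice call.
    Alarm.call('crawler heartbeat missing for 5 minutes')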

When Python multithreading meets exception handling

An exception raised inside a ThreadPoolExecutor worker does not surface immediately: it is stored on the Future and only re-raised when job.result() is called. Wrapping the submitted callable with exception_wrapper therefore logs and alarms every failure inside the worker itself, and the collecting loop simply sees a None result.

# Imports needed by this method (it lives on the spider class).
import glob
import os
from concurrent.futures import ThreadPoolExecutor, as_completed


def _crawl(self):
    crawler_dir = os.path.join(spider_dir, 'crawler', 'crawler_*.py')
    with ThreadPoolExecutor(max_workers=self.max_worker) as pool:
        futures = set()
        for cls_path in glob.glob(crawler_dir):
            imp_cls = self._load_class(
                cls_path,
                'spider.crawler',
                'CrawlerImpl'
            )
            # Class object loaded; instantiate it and submit the wrapped crawl job.
            if imp_cls is not None:
                cls_obj = imp_cls()
                job = pool.submit(exception_wrapper(cls_obj.crawl))
                futures.add(job)
            else:
                logger.error('spider:{} failed to load!', cls_path)
        for job in as_completed(futures):
            result = job.result()
            if result:
                self.crawler_result_list.append(result)
                logger.info('crawl:{}, {}', result.name, result.url)
            else:
                logger.warning('crawl result is None')
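
To see the effect, contrast a bare submission with a wrapped one (boom is a hypothetical always-failing worker): the bare future re-raises inside the collecting loop at result(), while the wrapped one returns None after logging and alarming.

from concurrent.futures import ThreadPoolExecutor

def boom():
    # Hypothetical crawler that always fails.
    raise RuntimeError('crawl failed')

with ThreadPoolExecutor(max_workers=2) as pool:
    bare = pool.submit(boom)
    wrapped = pool.submit(exception_wrapper(boom))
    # bare.result() would re-raise RuntimeError here and abort the loop;
    # the wrapped future just yields None.
    assert wrapped.result() is None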