Customizing the Django logger

Django uses Python's built-in logging system, and there are two common ways to split (rotate) log files: MyRotatingFileHandler and MyTimedRotatingFileHandler, built on the standard library's RotatingFileHandler and TimedRotatingFileHandler.
The former renames the current log file once it reaches a certain size, then creates a fresh base file and keeps writing, repeating the cycle.
The latter renames the current log file once a certain time interval has elapsed, then creates a fresh base file and keeps writing, repeating the cycle.
The two work much the same way, but the file names produced by the stock rotating handlers look odd,
so this article walks through adapting their source code to produce custom names.
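
To see why custom naming is needed, here is a minimal standalone sketch (independent of the project code below) of how the stock handlers name rotated files: RotatingFileHandler appends a numeric index, and TimedRotatingFileHandler appends a timestamp after the ".log" extension, which is exactly the naming we want to change.

# A minimal standalone sketch, not part of the project code.
import logging
from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler

demo = logging.getLogger("demo")
demo.setLevel(logging.INFO)
# Rotates to app.log.1, app.log.2, ... once app.log exceeds 50 bytes.
demo.addHandler(RotatingFileHandler("app.log", maxBytes=50, backupCount=3))
# Rotates to timed.log.2019-11-21 (suffix appended after ".log") at midnight.
demo.addHandler(TimedRotatingFileHandler("timed.log", when="MIDNIGHT", backupCount=3))

for i in range(20):
    demo.info("message %d", i)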

The Django settings file

# -*- coding: utf-8 -*-
# @Time : 2019/11/21 8:25
# @File : settings.py
import os

DEBUG = False

# Project root; assumed here to be the directory containing settings.py (adjust to your layout).
ABS_PATH = os.path.dirname(os.path.abspath(__file__))


"""Log output"""
# Create the top-level log directory
ALL_LOG = os.path.join(ABS_PATH, "logger")
if not os.path.exists(ALL_LOG):
    os.makedirs(ALL_LOG)
# Create a sub-directory for each kind of log
LOG_MAX_BYTES = 1 * 1024 * 1024  # 1 MB
LOG_BACKUP_COUNT = 10
DEFAULT_LOG = os.path.join(ALL_LOG, "default_log")
SYS_LOG = os.path.join(ALL_LOG, "sys_log")
TASK_LOG = os.path.join(ALL_LOG, "task_log")
DEVICE_LOG = os.path.join(ALL_LOG, "device_log")
RECORD_LOG = os.path.join(ALL_LOG, "record_log")
try:
    for log_dir in [DEFAULT_LOG, SYS_LOG, TASK_LOG, DEVICE_LOG, RECORD_LOG]:
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)
except FileExistsError as e:
    print(e)
except Exception as e:
    print(e)
"""Log output"""

Rotating logs by time

import os
import re
import time
from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler, BaseRotatingHandler
from stat import ST_MTIME
from settings import LOG_BACKUP_COUNT

class BaseLibs:

    def get_strtime(self, suffix="%Y-%m-%d-%H-%M-%S"):
        # Current local time, formatted e.g. 2019-11-21-08-25-30
        return time.strftime(suffix, time.localtime())

    def get_strtime_path(self, string):
        # Replace a timestamp of that form inside a path with the current time
        return re.sub(r"(\d{4}-\d{1,2}-\d{1,2}-\d{1,2}-\d{1,2}-\d{1,2})", self.get_strtime(), string)


class MyTimedRotatingFileHandler(TimedRotatingFileHandler, BaseLibs):

    def __init__(self, filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False,
                 atTime=None):
        BaseRotatingHandler.__init__(self, filename, 'a', encoding, delay)
        self.when = when.upper()
        self.backupCount = backupCount
        self.utc = utc
        self.atTime = atTime
        # Calculate the real rollover interval, which is just the number of
        # seconds between rollovers.  Also set the filename suffix used when
        # a rollover occurs.  Current 'when' events supported:
        # S - Seconds
        # M - Minutes
        # H - Hours
        # D - Days
        # midnight - roll over at midnight
        # W{0-6} - roll over on a certain day; 0 - Monday
        #
        # Case of the 'when' specifier is not important; lower or upper case
        # will work.
        if self.when == 'S':
            self.interval = 1  # one second
        elif self.when == 'M':
            self.interval = 60  # one minute
        elif self.when == 'H':
            self.interval = 60 * 60  # one hour
        elif self.when == 'D' or self.when == 'MIDNIGHT':
            self.interval = 60 * 60 * 24  # one day
        elif self.when.startswith('W'):
            self.interval = 60 * 60 * 24 * 7  # one week
            if len(self.when) != 2:
                raise ValueError("You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s" % self.when)
            if self.when[1] < '0' or self.when[1] > '6':
                raise ValueError("Invalid day specified for weekly rollover: %s" % self.when)
            self.dayOfWeek = int(self.when[1])
        else:
            raise ValueError("Invalid rollover interval specified: %s" % self.when)
        # Custom part: a single timestamp suffix for every interval, except daily
        # rollovers, which only need the date.
        self.suffix = "%Y-%m-%d_%H-%M-%S"
        self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}$"
        if self.when == 'D' or self.when == 'MIDNIGHT':
            self.suffix = "%Y-%m-%d"
            self.extMatch = r"^\d{4}-\d{2}-\d{2}$"
        self.extMatch = re.compile(self.extMatch, re.ASCII)
        self.interval = self.interval * interval  # multiply by units requested
        # The following line added because the filename passed in could be a
        # path object (see Issue #27493), but self.baseFilename will be a string
        filename = self.baseFilename
        if os.path.exists(filename):
            t = os.stat(filename)[ST_MTIME]
        else:
            t = int(time.time())
        self.rolloverAt = self.computeRollover(t)

    def doRollover(self):
        """
        do a rollover; in this case, a date/time stamp is appended to the filename
        when the rollover happens.  However, you want the file to be named for the
        start of the interval, not the current time.  If there is a backup count,
        then we have to get a list of matching filenames, sort them and remove
        the one with the oldest suffix.
        """
        if self.stream:
            self.stream.close()
            self.stream = None
        # get the time that this sequence started at and make it a TimeTuple
        currentTime = int(time.time())
        dstNow = time.localtime(currentTime)[-1]
        t = self.rolloverAt - self.interval
        if self.utc:
            timeTuple = time.gmtime(t)
        else:
            timeTuple = time.localtime(t)
            dstThen = timeTuple[-1]
            if dstNow != dstThen:
                if dstNow:
                    addend = 3600
                else:
                    addend = -3600
                timeTuple = time.localtime(t + addend)
        # Custom naming: instead of appending the timestamp after the extension
        # (the stock behaviour, e.g. record.log.2019-11-21), insert it before the
        # extension, so record.log becomes record2019-11-21.log.
        filename, log_suffix = self.baseFilename.rsplit(".", maxsplit=1)
        dfn_filename = ("%s." % time.strftime(self.suffix, timeTuple)).join([filename, log_suffix])
        dfn = self.rotation_filename(dfn_filename)
        if os.path.exists(dfn):
            os.remove(dfn)
        self.rotate(self.baseFilename, dfn)
        if self.backupCount > 0:
            for s in self.getFilesToDelete():
                os.remove(s)
        if not self.delay:
            self.stream = self._open()
        newRolloverAt = self.computeRollover(currentTime)
        while newRolloverAt <= currentTime:
            newRolloverAt = newRolloverAt + self.interval
        # If DST changes and midnight or weekly rollover, adjust for this.
        if (self.when == 'MIDNIGHT' or self.when.startswith('W')) and not self.utc:
            dstAtRollover = time.localtime(newRolloverAt)[-1]
            if dstNow != dstAtRollover:
                if not dstNow:  # DST kicks in before next rollover, so we need to deduct an hour
                    addend = -3600
                else:  # DST bows out before next rollover, so we need to add an hour
                    addend = 3600
                newRolloverAt += addend
        self.rolloverAt = newRolloverAt

    def getFilesToDelete(self):
        """
        Determine the files to delete when rolling over.

        More specific than the earlier method, which just used glob.glob().
        """
        dirName, baseName = os.path.split(self.baseFilename)
        fileNames = os.listdir(dirName)
        result = []
        # Rotated files are named <prefix><timestamp>.<ext>, e.g. record2019-11-21.log,
        # so derive the prefix and extension from the configured base file.
        prefix, ext = baseName.rsplit(".", maxsplit=1)
        re_match = r"^%s(\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2})\.%s$" % (re.escape(prefix), re.escape(ext))
        if self.when == 'D' or self.when == 'MIDNIGHT':
            re_match = r"^%s(\d{4}-\d{2}-\d{2})\.%s$" % (re.escape(prefix), re.escape(ext))
        for fileName in fileNames:
            m = re.match(re_match, fileName)
            if m and self.extMatch.match(m.group(1)):
                result.append(os.path.join(dirName, fileName))
        if len(result) < self.backupCount:
            result = []
        else:
            result.sort()
            result = result[:len(result) - self.backupCount]
        return result
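
With this handler, a base file such as record.log is rotated to record2019-11-21.log at each midnight rollover. Here is a hedged sketch of it in action, assuming the class above lives in a module importable as eclibs (a hypothetical path) and using a one-second interval so the rollover is easy to observe:

# A minimal sketch; "eclibs" is a hypothetical module path for the handler above.
import logging
import time
from eclibs import MyTimedRotatingFileHandler

logger = logging.getLogger("record_demo")
logger.setLevel(logging.INFO)
# Rotate every second so the rollover is easy to watch; real configs use when="MIDNIGHT".
logger.addHandler(MyTimedRotatingFileHandler("record.log", when="S", interval=1, backupCount=3))

for i in range(5):
    logger.info("line %d", i)
    time.sleep(1)
# The directory now contains record.log plus rotated files such as
# record2019-11-21_08-25-30.log (timestamp inserted before the extension).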

Rotating logs by file size

The size-based handler lives in the same module as the time-based one, so it reuses the imports and the BaseLibs helper defined above.

class MyRotatingFileHandler(RotatingFileHandler, BaseLibs):
    def doRollover(self):
        """
        Do a rollover, as described in __init__().
        """
        if self.stream:
            self.stream.close()
            self.stream = None
        # Each handler writes into its own directory, so every file here except the
        # base file is a previously rotated log.
        log_list = os.listdir(os.path.dirname(self.baseFilename))
        log_path, log_origin = os.path.split(self.baseFilename)
        log_list.remove(log_origin)
        if self.backupCount > 0:
            # LOG_BACKUP_COUNT mirrors the backupCount passed in the logging config;
            # once the limit is reached, drop the oldest rotated file.
            if len(log_list) > LOG_BACKUP_COUNT - 1:
                log_list = sorted(log_list,
                                  key=lambda x: re.search(r"(\d{4}-\d{1,2}-\d{1,2}-\d{1,2}-\d{1,2}-\d{1,2})",
                                                          x).group(), reverse=True)
                os.remove(os.path.join(log_path, log_list[-1]))
            # Same naming trick as the timed handler: insert the timestamp before the
            # extension, e.g. sys.log -> sys2019-11-21-08-25-30.log.
            filename, suffix = self.baseFilename.rsplit(".", maxsplit=1)
            dfn_filename = ("%s." % self.get_strtime()).join([filename, suffix])
            dfn = self.rotation_filename(dfn_filename)
            self.rotate(self.baseFilename, dfn)
        if not self.delay:
            self.stream = self._open()
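
As with the timed handler, a short sketch (again assuming the class is importable from a hypothetical eclibs module) shows the size-based rotation and the resulting names:

# A minimal sketch; "eclibs" is a hypothetical module path for the handler above.
import logging
import time
from eclibs import MyRotatingFileHandler

logger = logging.getLogger("sys_demo")
logger.setLevel(logging.INFO)
# Tiny maxBytes so rollovers trigger quickly; real configs use settings.LOG_MAX_BYTES.
logger.addHandler(MyRotatingFileHandler("sys.log", maxBytes=200, backupCount=3))

for i in range(60):
    logger.info("message %d", i)
    # Spread messages out so each rollover gets a distinct per-second timestamp.
    time.sleep(0.2)
# The directory now contains sys.log plus rotated files such as
# sys2019-11-21-08-25-30.log, with roughly LOG_BACKUP_COUNT old files kept.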

Configuration and instantiation

Create a new file for the logger setup, e.g. Logger.py; the name is up to you.

# -*- coding: utf-8 -*- 
# @Time : 2019/11/7 13:10
# @File : Logger.py
import logging
import logging.config as config
import os
import time
import settings

default_log = os.path.join(settings.DEFAULT_LOG, "default.log")
sys_log = os.path.join(settings.SYS_LOG, "sys.log")
task_log = os.path.join(settings.TASK_LOG, "task.log")
device_log = os.path.join(settings.DEVICE_LOG, "device.log")
record_log = os.path.join(settings.RECORD_LOG, "record.log")

standard_format = '[%(asctime)s][%(levelname)s] %(message)s'  # add %(name)s here if you want the getLogger name in the output
simple_format = '[%(levelname)s][%(asctime)s][%(filename)s:%(lineno)d]%(message)s'
id_simple_format = '[%(levelname)s][%(asctime)s] %(message)s'

LOGGING_DIC = {
    'version': 1,
    'disable_existing_loggers': False,  # whether loggers that already exist should be disabled when this config is applied
    'formatters': {
        'standard_format': {
            'format': standard_format
        },
        'simple_format': {
            'format': simple_format
        },
    },
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'simple_format'
        },
        'sys_log': {
            'level': 'DEBUG',
            'class': 'EC.libs.eclibs.MyRotatingFileHandler',  # dotted path to the module holding the custom handlers
            # 'class': 'logging.handlers.RotatingFileHandler',
            'filename': sys_log,
            'maxBytes': settings.LOG_MAX_BYTES,
            'backupCount': settings.LOG_BACKUP_COUNT,
            'formatter': 'standard_format',
        },
        'record_log': {
            'level': 'DEBUG',
            # 'class': 'logging.handlers.TimedRotatingFileHandler',
            'class': 'EC.libs.eclibs.MyTimedRotatingFileHandler',
            'filename': record_log,
            'when': 'MIDNIGHT',  # rotate once per day, at midnight
            'interval': 1,
            'formatter': 'standard_format',
            'encoding': 'utf-8',
        }
    },
    'loggers': {
        'sys': {
            'handlers': ['sys_log', "console"],
            'level': 'INFO',
            'propagate': False,
        },
        'record': {
            'handlers': ['record_log'],
            'level': 'INFO',
            'propagate': False
        }
    }
}
config.dictConfig(LOGGING_DIC)
default_logger = logging.getLogger('default')
sys_logger = logging.getLogger('sys')
task_logger = logging.getLogger('task')
device_logger = logging.getLogger('device')
record_logger = logging.getLogger('record')
# Only 'sys' and 'record' are wired to handlers in LOGGING_DIC above; the other
# three fall back to the root logger until matching entries are added.


if __name__ == '__main__':
    import random

    while True:
        trigger = ''.join(random.sample(
            ['z', 'y', 'x', 'w', 'v', 'u', 't', 's', 'r', 'q', 'p', 'o', 'n', 'm', 'l', 'k', 'j', 'i', 'h', 'g', 'f',
             'e',
             'd', 'c', 'b', 'a'], 15))
        sys_logger.info(trigger)
        # time.sleep(0.05)
        # default_logger.info(trigger)
        # time.sleep(0.001)

One last thing: make sure you actually run Logger.py on its own and watch what happens, fixing any problems right away. The version shown here works for me, but always verify it in your own project before relying on it, otherwise it will bite you badly.
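
For reference, here is a hedged sketch of how these loggers might be used from Django code; the view and field names are made up for illustration. In a Django project you can also assign LOGGING_DIC to the LOGGING setting in settings.py and let Django apply it with dictConfig, instead of calling config.dictConfig() yourself.

# A hypothetical Django view, just to illustrate usage of the configured loggers.
from django.http import JsonResponse

from Logger import record_logger, sys_logger


def device_status(request):
    sys_logger.info("device_status called from %s", request.META.get("REMOTE_ADDR"))
    data = {"status": "ok"}
    record_logger.info("device_status -> %s", data["status"])
    return JsonResponse(data)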
