Logback logging

  1. Create a logback-common.xml file
    <?xml version="1.0" encoding="UTF-8"?>
    <included>
        <property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n"/>
        <!-- Console-style output (written to console.log via a rolling file appender) -->
        <appender name="file_console" class="ch.qos.logback.core.rolling.RollingFileAppender">
            <file>${log.path}/console.log</file>
            <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
                <!-- Log file name pattern -->
                <fileNamePattern>${log.path}/console.%d{yyyy-MM-dd}.log</fileNamePattern>
                <!-- Keep at most 1 day of history -->
                <maxHistory>1</maxHistory>
            </rollingPolicy>
            <encoder>
                <pattern>${log.pattern}</pattern>
                <charset>utf-8</charset>
            </encoder>
            <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
                <!-- Minimum level to log -->
                <level>INFO</level>
            </filter>
        </appender>
    
        <!-- DEBUG log output -->
        <appender name="file_debug" class="ch.qos.logback.core.rolling.RollingFileAppender">
            <filter class="ch.qos.logback.classic.filter.LevelFilter">
                <!-- Level to match -->
                <level>DEBUG</level>
                <!-- On match: ACCEPT (log the event) -->
                <onMatch>ACCEPT</onMatch>
                <!-- On mismatch: DENY (discard the event) -->
                <onMismatch>DENY</onMismatch>
            </filter>
            <file>${log.path}/debug.log</file>
            <encoder>
                <pattern>${log.pattern}</pattern>
            </encoder>
            <!-- Rolling policy: roll log files by time (and by size via %i) -->
            <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
                <!-- Log file name pattern -->
                <fileNamePattern>${log.path}/debug.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
                <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                    <maxFileSize>10MB</maxFileSize>
                </timeBasedFileNamingAndTriggeringPolicy>
                <!-- Keep at most 5 days of history -->
                <maxHistory>5</maxHistory>
            </rollingPolicy>
        </appender>
    
        <!-- INFO (system) log output -->
        <appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
            <filter class="ch.qos.logback.classic.filter.LevelFilter">
                <!-- Level to match -->
                <level>INFO</level>
                <!-- On match: ACCEPT (log the event) -->
                <onMatch>ACCEPT</onMatch>
                <!-- On mismatch: DENY (discard the event) -->
                <onMismatch>DENY</onMismatch>
            </filter>
            <file>${log.path}/info.log</file>
            <encoder>
                <pattern>${log.pattern}</pattern>
            </encoder>
            <!-- Rolling policy: roll log files by time (and by size via %i) -->
            <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
                <!-- Log file name pattern -->
                <fileNamePattern>${log.path}/info.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
                <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                    <maxFileSize>10MB</maxFileSize>
                </timeBasedFileNamingAndTriggeringPolicy>
                <!-- Keep at most 10 days of history -->
                <maxHistory>10</maxHistory>
            </rollingPolicy>
        </appender>
        <!-- ERROR log output -->
        <appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
            <filter class="ch.qos.logback.classic.filter.LevelFilter">
                <!-- Level to match -->
                <level>ERROR</level>
                <!-- On match: ACCEPT (log the event) -->
                <onMatch>ACCEPT</onMatch>
                <!-- On mismatch: DENY (discard the event) -->
                <onMismatch>DENY</onMismatch>
            </filter>
            <file>${log.path}/error.log</file>
            <encoder>
                <pattern>${log.pattern}</pattern>
            </encoder>
            <!-- Rolling policy: roll log files by time (and by size via %i) -->
            <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
                <!-- Log file name pattern -->
                <fileNamePattern>${log.path}/error.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
                <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                    <maxFileSize>10MB</maxFileSize>
                </timeBasedFileNamingAndTriggeringPolicy>
                <!-- Keep at most 60 days of history -->
                <maxHistory>60</maxHistory>
            </rollingPolicy>
        </appender>
        <!-- Request input/output (parameter) log -->
        <appender name="file_params_log" class="ch.qos.logback.core.rolling.RollingFileAppender">
            <filter class="ch.qos.logback.classic.filter.LevelFilter">
                <!-- Level to match -->
                <level>INFO</level>
                <!-- On match: ACCEPT (log the event) -->
                <onMatch>ACCEPT</onMatch>
                <!-- On mismatch: DENY (discard the event) -->
                <onMismatch>DENY</onMismatch>
            </filter>
            <file>${log.path}/request.log</file>
            <encoder>
                <pattern>${log.pattern}</pattern>
            </encoder>
            <!-- Rolling policy: roll log files by time (and by size via %i) -->
            <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
                <!-- Log file name pattern -->
                <fileNamePattern>${log.path}/request.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
                <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                    <maxFileSize>10MB</maxFileSize>
                </timeBasedFileNamingAndTriggeringPolicy>
                <!-- Keep at most 5 days of history -->
                <maxHistory>5</maxHistory>
            </rollingPolicy>
        </appender>
        <!-- Async output for DEBUG -->
        <appender name="async_debug" class="ch.qos.logback.classic.AsyncAppender">
            <!-- Never drop events. By default, once the queue is 80% full, TRACE/DEBUG/INFO events are discarded; 0 disables discarding -->
            <discardingThreshold>0</discardingThreshold>
            <!-- Queue depth; affects performance. Default is 256 -->
            <queueSize>512</queueSize>
            <neverBlock>true</neverBlock>
            <!-- Attach the target appender; at most one appender-ref is allowed -->
            <appender-ref ref="file_debug"/>
        </appender>
        <!-- Async output for INFO -->
        <appender name="async_info" class="ch.qos.logback.classic.AsyncAppender">
            <!-- Never drop events. By default, once the queue is 80% full, TRACE/DEBUG/INFO events are discarded; 0 disables discarding -->
            <discardingThreshold>0</discardingThreshold>
            <!-- Queue depth; affects performance. Default is 256 -->
            <queueSize>512</queueSize>
            <neverBlock>true</neverBlock>
            <!-- Attach the target appender; at most one appender-ref is allowed -->
            <appender-ref ref="file_info"/>
        </appender>
    
        <!-- Async output for ERROR -->
        <appender name="async_error" class="ch.qos.logback.classic.AsyncAppender">
            <!-- Never drop events. By default, once the queue is 80% full, TRACE/DEBUG/INFO events are discarded; 0 disables discarding -->
            <discardingThreshold>0</discardingThreshold>
            <!-- Queue depth; affects performance. Default is 256 -->
            <queueSize>512</queueSize>
            <neverBlock>true</neverBlock>
            <!-- Attach the target appender; at most one appender-ref is allowed -->
            <appender-ref ref="file_error"/>
        </appender>
    
        <!-- Async output for request parameters -->
        <appender name="async_params" class="ch.qos.logback.classic.AsyncAppender">
            <!-- Never drop events. By default, once the queue is 80% full, TRACE/DEBUG/INFO events are discarded; 0 disables discarding -->
            <discardingThreshold>0</discardingThreshold>
            <!-- Queue depth; affects performance. Default is 256 -->
            <queueSize>512</queueSize>
            <neverBlock>true</neverBlock>
            <!-- Attach the target appender; at most one appender-ref is allowed -->
            <appender-ref ref="file_params_log"/>
        </appender>
    
        <!-- Route logs from this package to a dedicated file -->
        <logger name="cc.test.common.web.aspect" additivity="false" level="info">
            <appender-ref ref="async_params" />
        </logger>
    </included>
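
  With this shared config, any logger under the cc.test.common.web.aspect package is routed only to request.log (additivity is off), while everything else falls through to the root appenders configured in step 2. A minimal sketch to verify that routing (the class name is hypothetical; it assumes slf4j-api and logback-classic on the classpath):

    package cc.test.common.web.aspect;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Hypothetical class, used only to show where its output lands under this config.
    public class RequestLogRoutingDemo {
        private static final Logger log = LoggerFactory.getLogger(RequestLogRoutingDemo.class);

        public static void main(String[] args) {
            // The logger name starts with "cc.test.common.web.aspect", so it matches the
            // dedicated <logger> declared above (additivity="false") and is written only
            // to request.log via async_params, not to the root appenders.
            log.info("routed to request.log");
        }
    }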
    
  2. Include it in the project: create logback.xml. Note that ${project.artifactId} in the log path below is resolved only if Maven resource filtering is applied to logback.xml; otherwise replace it with a literal project name.
    <?xml version="1.0" encoding="UTF-8"?>
    <configuration scan="true" scanPeriod="60 seconds" debug="false">
        <!-- Log output directory -->
        <property name="log.path" value="logs/${project.artifactId}" />
    
        <include resource="logback-common.xml"/>
    
        <!-- System operation log (INFO and ERROR) -->
        <root level="info">
            <appender-ref ref="async_info"/>
            <appender-ref ref="async_error"/>
        </root>
    </configuration>
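
  With the root logger at info, a quick smoke test is a throwaway main method (a sketch only; the class name is hypothetical and assumes slf4j-api and logback-classic on the classpath):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Hypothetical class for a quick check of the wiring above.
    public class LoggingSmokeTest {
        private static final Logger log = LoggerFactory.getLogger(LoggingSmokeTest.class);

        public static void main(String[] args) {
            log.debug("dropped: below the root level of info");              // reaches no appender
            log.info("written to info.log via async_info -> file_info");     // file_error's LevelFilter denies INFO
            log.error("written to error.log via async_error -> file_error"); // file_info's LevelFilter denies ERROR
        }
    }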
    
  3. Intercept and log request input and output parameters
    package cc.test.common.web.aspect;
    
    import cn.hutool.json.JSONUtil;
    import lombok.extern.slf4j.Slf4j;
    import org.apache.commons.lang3.ArrayUtils;
    import org.aspectj.lang.ProceedingJoinPoint;
    import org.aspectj.lang.annotation.Around;
    import org.aspectj.lang.annotation.Aspect;
    import org.aspectj.lang.annotation.Pointcut;
    import org.springframework.boot.autoconfigure.AutoConfiguration;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.http.HttpHeaders;
    import org.springframework.ui.Model;
    import org.springframework.web.context.request.RequestContextHolder;
    import org.springframework.web.context.request.ServletRequestAttributes;
    import org.springframework.web.multipart.MultipartFile;
    
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;
    import java.io.File;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;
    
    /**
     * Aspect that logs controller request parameters, return values and elapsed time.
     *
     * @ClassName RequestParamsAspect
     * @Date 2024/2/18 16:06
     **/
    @Slf4j
    @Aspect
    @Configuration
    @AutoConfiguration
    public class RequestParamsAspect {
        /**
         * Pointcut covering all controller methods whose parameters and elapsed time should be logged
         */
        @Pointcut("execution(* cc.test..controller..*.*(..))")
        public void pointCut() {
        }
    
        @Around(value = "pointCut()")
        public Object doAuthMethodInterrupt(ProceedingJoinPoint point) throws Throwable {
            long currentTimeStampBegin = System.currentTimeMillis();
            Object obj = null;
            ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
            if (attributes == null) {
                // Not bound to a web request (e.g. an async or scheduled call): skip request logging
                return point.proceed();
            }
            HttpServletRequest request = attributes.getRequest();
    
            // Incoming arguments
            Object[] paramArr = point.getArgs();
    
            // Exclude servlet, model and file arguments from JSON serialization
            List<Object> logArgs = streamOf(paramArr)
                    .filter(arg ->
                            !(arg instanceof HttpServletRequest)
                                    && !(arg instanceof HttpServletResponse)
                                    && !(arg instanceof Model)
                                    && !(arg instanceof MultipartFile)
                                    && !(arg instanceof File)
                    ).collect(Collectors.toList());
            String requestURI = request.getRequestURI();
            String contentType = request.getHeader(HttpHeaders.CONTENT_TYPE);
            String authorization = request.getHeader(HttpHeaders.AUTHORIZATION);
            // Log the incoming request
            log.info("[PLUS] Request start => URL[{}], content type[{}], args[{}], Authorization[{}]", requestURI, contentType, JSONUtil.toJsonStr(logArgs), authorization);
            try {
                obj = point.proceed();
                long diff = System.currentTimeMillis() - currentTimeStampBegin;
                // Log the response
                log.info("[PLUS] Request end => URL[{}], elapsed[{}] ms, return value[{}]", requestURI, diff, JSONUtil.toJsonStr(obj));
            } catch (Exception e) {
                log.info("[PLUS] Request failed => URL[{}], error[{}]", requestURI, e.getMessage());
                throw e;
            }
            return obj;
        }
    
        public static <T> Stream<T> streamOf(T[] array) {
            return ArrayUtils.isEmpty(array) ? Stream.empty() : Arrays.stream(array);
        }
    }
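
  Any class in a controller package under cc.test is matched by the pointcut above. A hypothetical controller just to illustrate what gets intercepted (the package, class and mapping are assumptions, not part of the original project):

    package cc.test.demo.controller;

    import org.springframework.web.bind.annotation.GetMapping;
    import org.springframework.web.bind.annotation.RequestParam;
    import org.springframework.web.bind.annotation.RestController;

    // Hypothetical controller matched by "execution(* cc.test..controller..*.*(..))".
    @RestController
    public class EchoController {

        // Each call is wrapped by doAuthMethodInterrupt: the URI, content type, filtered
        // arguments and Authorization header are logged on entry, and the elapsed time
        // plus the serialized return value are logged on exit, all into request.log.
        @GetMapping("/echo")
        public String echo(@RequestParam String msg) {
            return "echo: " + msg;
        }
    }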
    
