Using the SLF4J Logging Facade: Integrating Logback and Log4j2, with a Performance Comparison

1. SLF4J

SLF4J (Simple Logging Facade for Java) is an abstraction over logging frameworks, which means it cannot be used on its own: it must be paired with a concrete logging implementation. To use SLF4J, put slf4j-api-xxx.jar on the classpath; this jar provides the facade API, and a separate binding bridges it to the framework that actually writes the logs, such as Log4j, Log4j2, or Logback.
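
For example, the facade API can be pulled in with Maven roughly as follows (the version is illustrative, and a binding such as logback-classic or log4j-slf4j-impl is still needed to do the actual logging):

<dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-api</artifactId>
    <!-- illustrative version; match it to the binding you use -->
    <version>1.7.36</version>
</dependency>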

Programmatic style

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
...
...
private static final Logger logger = LoggerFactory.getLogger(T.class);

// SLF4J message placeholders ({}) defer string construction until the level is enabled
logger.trace("trace message");
logger.debug("debug message");
logger.info("info message, value={}", 42);
logger.warn("warn message");
logger.error("error message", new RuntimeException("example"));

Annotation style (Lombok)

IDEA requires the Lombok plugin to be installed.

Add the Lombok dependency to the pom file.
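
A minimal Maven sketch of that dependency (the version is illustrative):

<dependency>
    <groupId>org.projectlombok</groupId>
    <artifactId>lombok</artifactId>
    <!-- illustrative version -->
    <version>1.18.30</version>
    <scope>provided</scope>
</dependency>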

import lombok.extern.slf4j.Slf4j;

@Slf4j
public class OauthApp {
	...
	...
	// Lombok's @Slf4j generates a static final field named "log"
	log.trace("trace message");
	log.debug("debug message");
	log.info("info message");
	log.warn("warn message");
	log.error("error message");
}

2. Logback

logback-spring.xml configuration file

<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
    <springProperty scope="context" name="springAppName" source="spring.application.name"/>
    <springProperty scope="context" name="ELK_FILEBEAT_PATH" source="elk.filebeat_path" defaultValue="/PATHTO/log"/>
    <springProperty scope="context" name="ELK_URL" source="elk.url" defaultValue="127.0.0.1"/>
    <springProperty scope="context" name="ELK_QUEUE_SIZE" source="elk.queue_size" defaultValue="8192"/>
    <!-- Example for logging into the build folder of your project -->
    <property name="LOG_FILE" value="${BUILD_FOLDER:-build}/${springAppName}"/>
    <!-- You can override this to have a custom pattern -->
    <property name="CONSOLE_LOG_PATTERN"
              value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(%X{transNo}){faint} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}"/>

    <!-- Appender to log to console -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <!-- Minimum logging level to be presented in the console logs-->
            <level>DEBUG</level>
        </filter>
        <encoder>
            <pattern>${CONSOLE_LOG_PATTERN}</pattern>
            <charset>utf8</charset>
        </encoder>
    </appender>

    <appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>DEBUG</level>
        </filter>
        <destination>${ELK_URL}</destination>
        <queueSize>${ELK_QUEUE_SIZE}</queueSize>
        <encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
            <providers>
                <timestamp>
                    <timeZone>UTC</timeZone>
                </timestamp>
                <pattern>
                    <pattern>
                        {
                        "logdate":"%date{ISO8601}",
                        "severity": "%level",
                        "service": "${springAppName:-}",
                        "trace": "%X{X-B3-TraceId:-}",
                        "span": "%X{X-B3-SpanId:-}",
                        "parent": "%X{X-B3-ParentSpanId:-}",
                        "exportable": "%X{X-Span-Export:-}",
                        "pid": "${PID:-}",
                        "thread": "%thread",
                        "class": "%logger{40}",
                        "rest": "%message",
                        "transNo": "%X{transNo}"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
    </appender>

    <appender name="FILE_INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>INFO</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- daily rollover -->
            <!--<fileNamePattern>C:/aaa/log/uw.%d{yyyy-MM-dd}.log</fileNamePattern>-->
            <fileNamePattern>${ELK_FILEBEAT_PATH}/fin-info.%d{yyyy-MM-dd}-%i.log</fileNamePattern>
            <maxFileSize>128MB</maxFileSize>
            <!-- keep 30 days' worth of history capped at 2GB total size -->
            <maxHistory>30</maxHistory>
            <totalSizeCap>2GB</totalSizeCap>
        </rollingPolicy>
        <encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
            <providers>
                <!--<timestamp>-->
                    <!--<timeZone>UTC</timeZone>-->
                <!--</timestamp>-->
                <pattern>
                    <pattern>
                        {
                        "logdate":"%date{ISO8601}",
                        "severity": "%level",
                        "service": "${springAppName:-}",
                        "trace": "%X{X-B3-TraceId:-}",
                        "span": "%X{X-B3-SpanId:-}",
                        "parent": "%X{X-B3-ParentSpanId:-}",
                        "exportable": "%X{X-Span-Export:-}",
                        "pid": "${PID:-}",
                        "thread": "%thread",
                        "class": "%logger{40}",
                        "rest": "%message",
                        "transNo": "%X{transNo}"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
    </appender>
    <appender name="FILE_DEBUG" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>DEBUG</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- daily rollover -->
            <!--<fileNamePattern>C:/aaa/log/uw.%d{yyyy-MM-dd}.log</fileNamePattern>-->
            <fileNamePattern>${ELK_FILEBEAT_PATH}/fin-debug.%d{yyyy-MM-dd}-%i.log</fileNamePattern>
            <maxFileSize>128MB</maxFileSize>
            <!-- keep 30 days' worth of history capped at 2GB total size -->
            <maxHistory>30</maxHistory>
            <totalSizeCap>2GB</totalSizeCap>
        </rollingPolicy>
        <encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
            <providers>
                <!--<timestamp>-->
                <!--<timeZone>UTC</timeZone>-->
                <!--</timestamp>-->
                <pattern>
                    <pattern>
                        {
                        "logdate":"%date{ISO8601}",
                        "severity": "%level",
                        "service": "${springAppName:-}",
                        "trace": "%X{X-B3-TraceId:-}",
                        "span": "%X{X-B3-SpanId:-}",
                        "parent": "%X{X-B3-ParentSpanId:-}",
                        "exportable": "%X{X-Span-Export:-}",
                        "pid": "${PID:-}",
                        "thread": "%thread",
                        "class": "%logger{40}",
                        "rest": "%message",
                        "transNo": "%X{transNo}"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
    </appender>
    <!--<appender name="ASYNC" class="ch.qos.logback.classic.AsyncAppender">-->
        <!--<appender-ref ref="FILE" />-->
    <!--</appender>-->
    <root level="info">
        <appender-ref ref="console"/>
        <!-- uncomment this to have also JSON logs -->
        <appender-ref ref="logstash"/>
        <!--<appender-ref ref="FILE" />-->
        <!--<appender-ref ref="ASYNC"/>-->
    </root>
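    <!-- Note: the two logger blocks below configure the same logger name ("com.sinosoft");
         keep only one of them per environment (e.g. INFO for production, DEBUG for development),
         otherwise the later level definition wins and both sets of appenders are attached. -->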
    <logger name="com.sinosoft" level="INFO" additivity="false">
        <appender-ref ref="console"/>
        <!-- uncomment this to have also JSON logs -->
        <appender-ref ref="logstash"/>
        <appender-ref ref="FILE_INFO" />
    </logger>
    <logger name="com.sinosoft" level="DEBUG" additivity="false">
        <appender-ref ref="console"/>
        <!-- uncomment this to have also JSON logs -->
        <appender-ref ref="logstash"/>
        <appender-ref ref="FILE_DEBUG" />
    </logger>
</configuration>
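
The logstash appender and the JSON encoders used above (classes under net.logstash.logback) come from the logstash-logback-encoder library; a minimal Maven sketch with an illustrative version:

<dependency>
    <groupId>net.logstash.logback</groupId>
    <artifactId>logstash-logback-encoder</artifactId>
    <!-- illustrative version; pick one compatible with your Logback version -->
    <version>7.4</version>
</dependency>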

Property configuration (an application.properties sketch follows the list):

  • elk.filebeat_path
  • elk.url
  • elk.queue_size
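
A minimal sketch of how these keys could be supplied, assuming a Spring Boot application.properties (the values, including the application name and Logstash port, are illustrative):

# application.properties (illustrative values)
spring.application.name=oauth-app
elk.filebeat_path=/PATHTO/log
# host:port of the Logstash TCP input
elk.url=127.0.0.1:5044
elk.queue_size=8192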

3. Log4j2

log4j2.xml configuration file

<?xml version="1.0" encoding="UTF-8"?>
<!-- Log levels in priority order: OFF > FATAL > ERROR > WARN > INFO > DEBUG > TRACE > ALL -->
<!-- status="warn": the level of Log4j2's own internal status logging (the default is ERROR) -->
<!-- monitorInterval="60": interval in seconds at which the configuration file is checked for changes and automatically reloaded -->
<configuration status="warn" monitorInterval="60" strict="true">
  <properties>
    <!-- Define constants here and reference them later as ${name} -->
    <property name="logpath">./logs</property>
    <property name="charset">UTF-8</property>
    <!-- Custom output pattern -->
    <property name="pattern">%-d{yyyy-MM-dd HH:mm:ss.SSS}@@%p@@%X{ip}@@%t %C@@%X{requestId} %M %m %n </property>
  </properties>
  <!-- appenders: define the output content, format, destination, and retention policy; the commonly used child tags are Console, File, and RollingFile -->
  <!-- An appender can be thought of as a log output destination -->
  <appenders>
    <!-- Console: configuration for console output -->
    <Console name="console" target="SYSTEM_OUT">
      <PatternLayout pattern="${pattern}" charset="${charset}"/>
    </Console>
    <!-- The Log4j2 site claims RollingRandomAccessFile is 20-200% faster than RollingFile -->
    <RollingRandomAccessFile name="YZY.TRACE" immediateFlush="true" bufferSize="1024"
      fileName="${logpath}/trace.log"
      filePattern="${logpath}/trace.log.%d{yyyy-MM-dd}.gz">
      <PatternLayout pattern="${pattern}" charset="${charset}"/>
      <TimeBasedTriggeringPolicy/>
      <DefaultRolloverStrategy>
        <Delete basePath="${logpath}" maxDepth="2" followLinks="true">
          <IfFileName glob="trace.log.*.gz"/>
          <IfLastModified age="3d"/>
        </Delete>
      </DefaultRolloverStrategy>
    </RollingRandomAccessFile>
    <RollingRandomAccessFile name="YZY.SYSTEM" immediateFlush="true" bufferSize="4096"
      fileName="${logpath}/system.log"
      filePattern="${logpath}/system.log.%d{yyyy-MM-dd}.gz"
      ignoreExceptions="false">
      <!-- Reference the custom output pattern defined above -->
      <PatternLayout pattern="${pattern}" charset="${charset}"/>
      <Filters>
        <!-- ThresholdFilter: filters log output -->
        <!-- level: the threshold level; onMatch="ACCEPT" accepts events at or above that level; onMismatch="DENY" rejects events below it -->
        <!-- Works together with the level set on the logger/root: they act as two gates. The logger/root level is checked first, and only events that pass it reach this filter for a second round of filtering -->
        <ThresholdFilter level="TRACE" onMatch="ACCEPT" onMismatch="DENY"/>
        <!--<ThresholdFilter level="DEBUG" onMatch="ACCEPT" onMismatch="DENY"/>-->
        <!--<ThresholdFilter level="INFO" onMatch="ACCEPT" onMismatch="DENY"/>-->
      </Filters>
      <!-- Policies: rollover trigger policies -->
      <Policies>
        <!--<TimeBasedTriggeringPolicy interval="1" modulate="true"/>-->
        <CronTriggeringPolicy schedule="0 0 2 * * ?" evaluateOnStartup="true"/>
      </Policies>
      <!-- If DefaultRolloverStrategy is not configured, it defaults to keeping at most 7 files in the same folder -->
      <DefaultRolloverStrategy>
        <Delete basePath="${logpath}" maxDepth="2" followLinks="true">
          <IfFileName glob="system.log.*.gz"/>
          <!-- Keep only 7 days; older files are deleted -->
          <IfLastModified age="7d"/>
        </Delete>
      </DefaultRolloverStrategy>
    </RollingRandomAccessFile>
    <RollingRandomAccessFile name="YZY.ERROR" immediateFlush="true" bufferSize="4096"
      fileName="${logpath}/error.log"
      filePattern="${logpath}/error.log.%d{yyyy-MM-dd}.gz"
      ignoreExceptions="false">
      <PatternLayout pattern="${pattern}" charset="${charset}"/>
      <Filters>
        <ThresholdFilter level="ERROR" onMatch="ACCEPT" onMismatch="DENY"/>
      </Filters>
      <TimeBasedTriggeringPolicy/>
      <DefaultRolloverStrategy>
        <Delete basePath="${logpath}" maxDepth="2" followLinks="true">
          <IfFileName glob="error.log.*.gz"/>
          <IfLastModified age="7d"/>
        </Delete>
      </DefaultRolloverStrategy>
    </RollingRandomAccessFile>
    <RollingRandomAccessFile name="YZY.AUDIT" immediateFlush="false" bufferSize="8192"
      fileName="${logpath}/audit.log"
      filePattern="${logpath}/audit.log.%d{yyyy-MM-dd}.gz"
      ignoreExceptions="false">
      <PatternLayout pattern="${pattern}" charset="${charset}"/>
      <Filters>
        <ThresholdFilter level="WARN" onMatch="ACCEPT" onMismatch="DENY"/>
      </Filters>
      <TimeBasedTriggeringPolicy/>
      <DefaultRolloverStrategy>
        <Delete basePath="${logpath}" maxDepth="2" followLinks="true">
          <IfFileName glob="audit.log.*.gz"/>
          <IfLastModified age="7d"/>
        </Delete>
      </DefaultRolloverStrategy>
    </RollingRandomAccessFile>
    <RollingRandomAccessFile name="YZY.POOL" immediateFlush="true" bufferSize="1024"
      fileName="${logpath}/pool.log"
      filePattern="${logpath}/pool.log.%d{yyyy-MM-dd}.gz"
      ignoreExceptions="false">
      <PatternLayout pattern="${pattern}" charset="${charset}"/>
      <TimeBasedTriggeringPolicy/>
      <DefaultRolloverStrategy>
        <Delete basePath="${logpath}" maxDepth="2" followLinks="true">
          <IfFileName glob="pool.log.*.gz"/>
          <IfLastModified age="3d"/>
        </Delete>
      </DefaultRolloverStrategy>
    </RollingRandomAccessFile>
    <RollingRandomAccessFile name="YZY.MONITOR" immediateFlush="true" bufferSize="1024"
      fileName="${logpath}/monitor.log"
      filePattern="${logpath}/pool.log.%d{yyyy-MM-dd}.gz"
      ignoreExceptions="false">
      <PatternLayout pattern="${pattern}" charset="${charset}"/>
      <TimeBasedTriggeringPolicy/>
      <DefaultRolloverStrategy>
        <Delete basePath="${logpath}" maxDepth="2" followLinks="true">
          <IfFileName glob="pool.log.*.gz"/>
          <IfLastModified age="3d"/>
        </Delete>
      </DefaultRolloverStrategy>
    </RollingRandomAccessFile>
    <RollingRandomAccessFile name="YZY.BIZ" immediateFlush="true"
      fileName="${logpath}/biz.log"
      filePattern="${logpath}/biz.log.%d{yyyy-MM-dd}.gz"
      ignoreExceptions="false">
      <PatternLayout pattern="${pattern}" charset="${charset}"/>
      <TimeBasedTriggeringPolicy/>
      <DefaultRolloverStrategy>
        <Delete basePath="${logpath}" maxDepth="2" followLinks="true">
          <IfFileName glob="biz.log.*.gz"/>
          <IfLastModified age="7d"/>
        </Delete>
      </DefaultRolloverStrategy>
    </RollingRandomAccessFile>
  </appenders>
  
  <!-- Define loggers next; an appender only takes effect once a logger (or root) references it -->
  <loggers>
    <!-- additivity="false" means events from this logger do not propagate to parent loggers; if set to true they also reach the Root logger and are output again according to its configuration -->
    <Logger additivity="false" name="YZY.TRACE" level="INFO">
      <AppenderRef ref="YZY.TRACE"/>
    </Logger>
    <Logger additivity="false" name="YZY.SYSTEM" level="INFO">
      <AppenderRef ref="YZY.SYSTEM"/>
      <AppenderRef ref="YZY.ERROR"/>
    </Logger>
    <Logger additivity="false" name="YZY.BIZ" level="INFO">
      <AppenderRef ref="YZY.BIZ"/>
    </Logger>
    <!-- A Logger element configures logging for a specific name (usually a package), e.g. setting everything under org.apache to INFO -->
    <Logger additivity="false" name="org.apache" level="INFO">
      <AppenderRef ref="console"/>
    </Logger>
    <Logger additivity="false"
      name="com.alibaba.dubbo.common.threadpool.monitor.MonitorPoolRunnable" level="INFO">
      <AppenderRef ref="YZY.POOL"/>
    </Logger>
    <Logger additivity="false" name="com.alibaba.dubbo.monitor.dubbo.sfextend.SfMonitorExtend"
      level="INFO">
      <AppenderRef ref="YZY.MONITOR"/>
    </Logger>
    <!-- Output level for request and response logging; set this to ERROR in production -->
    <Logger additivity="true" name="com.alibaba.dubbo.rpc.protocol.rest.support" level="INFO">
      <AppenderRef ref="console"/>
    </Logger>
    <!-- Enabled in dev and test environments to output SQL -->
    <Logger additivity="true" name="com.YZY.mapper" level="DEBUG">
    </Logger>
    <!-- The Root logger is the project's default; any logger without its own configuration falls back to it -->
    <Root level="DEBUG" includeLocation="true">
      <AppenderRef ref="console"/>
      <AppenderRef ref="YZY.SYSTEM"/>
      <AppenderRef ref="YZY.ERROR"/>
      <AppenderRef ref="YZY.AUDIT"/>
    </Root>
  </loggers>
</configuration>
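
To route SLF4J calls to Log4j2, the Log4j2 core and an SLF4J binding must be on the classpath; a minimal Maven sketch with illustrative versions:

<dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-api</artifactId>
    <version>2.20.0</version>
</dependency>
<dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-core</artifactId>
    <version>2.20.0</version>
</dependency>
<!-- binding for SLF4J 1.x; with SLF4J 2.x use log4j-slf4j2-impl instead -->
<dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-slf4j-impl</artifactId>
    <version>2.20.0</version>
</dependency>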

4. Performance Comparison Results

[Performance comparison chart from the original post]
