Spring Boot + Log4j2 + Kafka Log Output

  1. The advantages of Log4j2 and asynchronous log output to Kafka will not be elaborated on here.
  2. Preparation steps
    – Remove Spring Boot's default logging dependency and introduce Log4j2:
<dependency>  
    <groupId>org.springframework.boot</groupId>  
    <artifactId>spring-boot-starter-web</artifactId>  
    <exclusions>
        <exclusion>  
            <groupId>org.springframework.boot</groupId>  
            <artifactId>spring-boot-starter-logging</artifactId>  
        </exclusion>  
    </exclusions>  
</dependency> 
<dependency>
    <groupId>org.springframework.boot</groupId>  
    <artifactId>spring-boot-starter-log4j2</artifactId>  
</dependency> 

– Add the Kafka-related dependencies to the project:

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.5.0</version>
    <exclusions>
        <exclusion>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
        <exclusion>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>org.scala-lang</groupId>
    <artifactId>scala-library</artifactId>
    <version>2.13.1</version>
</dependency>
  3. Write the log configuration file. Spring Boot picks up the file name log4j2-spring.xml by default; if you use a different name, point to it with logging.config in application.yaml. Note that logging.path is exposed to Log4j2 as ${LOG_PATH}, which the configuration below uses as its log directory.
logging:
  path: logs/
  config: log4j2-dev.xml
<?xml version="1.0" encoding="utf-8"?>
<configuration>
    <Properties>
        <Property name="LOG_HOME">${LOG_PATH}</Property>
        <Property name="KAFKA_SERVERS">10.210.100.17:9092</Property>
        <!-- fileName is referenced by the RollingFile appenders below; define it here (value assumed, adjust to your application name) -->
        <Property name="fileName">app</Property>
        <Property name="FILE_PATTERN">{"timeMillis":"%d","thread":"%t","threadId":"%T","traceId":"%X{X-B3-TraceId}","level":"%-5p","className":"%c{1.}.%M(%L)","message":"%m","exception":"%rEx"}%n</Property>
        <Property name="CONSOLE_PATTERN">%d  %style{[%t]-[%T]}{Cyan} %style{[%X{X-B3-TraceId}]}{Magenta}--%highlight{%-5p}-%c{1.}.%M(%L)-%m-%highlight{%rEx}%n</Property>
    </Properties>

    <Appenders>
        <!-- Console gets the colored, human-readable pattern; Kafka gets the JSON pattern that downstream consumers parse -->
        <Console name="Console" target="SYSTEM_OUT" ignoreExceptions="true">
            <PatternLayout pattern="${CONSOLE_PATTERN}" disableAnsi="false" noConsoleNoAnsi="false"/>
        </Console>
        <Kafka name="Kafka" topic="log-topic" ignoreExceptions="false">
            <PatternLayout pattern="${FILE_PATTERN}"/>
            <Property name="bootstrap.servers">${KAFKA_SERVERS}</Property>
            <Property name="timeout.ms">4000</Property>
        </Kafka>

        <RollingFile name="INFO_ROLLING_FILE"
                     fileName="${LOG_HOME}/${fileName}-${date:yyyy-MM-dd}-info.log"
                     filePattern="${LOG_HOME}/${fileName}/${date:yyyy-MM}/info-%d{yyyy-MM-dd}-%i.log.gz" ignoreExceptions="true">
            <!-- ThresholdFilter: filters which events this appender writes -->
            <!-- level="info": threshold level; onMatch="ACCEPT": accept events at info or above; onMismatch="DENY": reject events below info -->
            <ThresholdFilter level="info" onMatch="ACCEPT" onMismatch="DENY"/>
            <PatternLayout pattern="${FILE_PATTERN}" disableAnsi="false" noConsoleNoAnsi="false"/>
            <Policies>
                <TimeBasedTriggeringPolicy interval="4" modulate="true" />
                <SizeBasedTriggeringPolicy size="100 MB"/>
            </Policies>
            <!-- If DefaultRolloverStrategy is not set, at most 7 archive files are kept in the same folder by default; here it is raised to 20 -->
            <DefaultRolloverStrategy max="20"/>
        </RollingFile>
        <RollingFile name="WARN_ROLLING_FILE"
                     fileName="${LOG_HOME}/${fileName}-${date:yyyy-MM-dd}-warn.log"
                     filePattern="${LOG_HOME}/${fileName}/${date:yyyy-MM}/warn-%d{yyyy-MM-dd}-%i.log.gz" ignoreExceptions="true">
            <ThresholdFilter level="warn" onMatch="ACCEPT" onMismatch="DENY"/>
<!--            <JsonLayout compact="true" eventEol = "true"/>-->
            <PatternLayout pattern="${FILE_PATTERN}" disableAnsi="false" noConsoleNoAnsi="false"/>

            <Policies>
                <TimeBasedTriggeringPolicy interval="4" modulate="true" />
                <SizeBasedTriggeringPolicy size="100 MB"/>
            </Policies>
            <DefaultRolloverStrategy max="20"/>
        </RollingFile>
        <RollingFile name="ERROR_ROLLING_FILE"
                     fileName="${LOG_HOME}/${fileName}-${date:yyyy-MM-dd}-error.log"
                     filePattern="${LOG_HOME}/${fileName}/${date:yyyy-MM}/error-%d{yyyy-MM-dd}-%i.log.gz" ignoreExceptions="true">
            <ThresholdFilter level="error" onMatch="ACCEPT" onMismatch="DENY"/>
<!--            <JsonLayout compact="true" complete="true" />-->
            <PatternLayout pattern="${FILE_PATTERN}" disableAnsi="false" noConsoleNoAnsi="false"/>

            <Policies>
                <TimeBasedTriggeringPolicy interval="4" modulate="true" />
                <SizeBasedTriggeringPolicy size="100 MB"/>
            </Policies>
            <DefaultRolloverStrategy max="20"/>
        </RollingFile>

        <RollingFile name="FAIL_OVER_KAFKA_FILE"
                     fileName="${LOG_HOME}/${fileName}-${date:yyyy-MM-dd}-kafka.log"
                     filePattern="${LOG_HOME}/${fileName}/${date:yyyy-MM}/kafka-%d{yyyy-MM-dd}-%i.log.gz" ignoreExceptions="true">
            <ThresholdFilter level="INFO" onMatch="ACCEPT" onMismatch="DENY"/>
            <PatternLayout pattern="${FILE_PATTERN}" disableAnsi="false" noConsoleNoAnsi="false"/>

            <Policies>
                <TimeBasedTriggeringPolicy interval="4" modulate="true" />
                <SizeBasedTriggeringPolicy size="100 MB"/>
            </Policies>
            <!-- If DefaultRolloverStrategy is not set, at most 7 archive files are kept in the same folder by default; here it is raised to 20 -->
            <DefaultRolloverStrategy max="20"/>
        </RollingFile>

        <Failover name="Failover" primary="Kafka" retryIntervalSeconds="600">
            <Failovers>
                <AppenderRef ref="FAIL_OVER_KAFKA_FILE"/>
<!--                <AppenderRef ref="Console"/>-->
            </Failovers>
        </Failover>

        <Async name="AsyncKafka">
            <AppenderRef ref="Failover"/>
        </Async>
    </Appenders>

    <Loggers>
        <!-- Filter out the mostly useless DEBUG output from Spring and MyBatis -->
        <!-- A Logger node sets the level for a specific package: name is the package path, e.g. everything under org.springframework is logged at INFO. -->
        <logger name="org.springframework" level="INFO"></logger>
        <logger name="org.mybatis" level="INFO"></logger>
        <logger name="org.apache.kafka" level="INFO" /> <!-- avoid recursive logging -->

        <Root level="info">
            <AppenderRef ref="Console"/>
            <AppenderRef ref="AsyncKafka"/>
            <AppenderRef ref="WARN_ROLLING_FILE"/>
            <AppenderRef ref="INFO_ROLLING_FILE"/>
            <AppenderRef ref="ERROR_ROLLING_FILE"/>
        </Root>
    </Loggers>
</configuration>
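
With this configuration in place, application code simply logs through SLF4J as usual: every event at INFO or above goes to the console, the rolling files and, via AsyncKafka -> Failover -> Kafka, to the log-topic topic. A minimal usage sketch (the controller and messages are illustrative, not part of the original project):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class DemoController {

    // Plain SLF4J logger; spring-boot-starter-log4j2 routes it to the appenders defined above
    private static final Logger log = LoggerFactory.getLogger(DemoController.class);

    @GetMapping("/demo")
    public String demo() {
        // Lands in Console, INFO_ROLLING_FILE and, asynchronously, in the log-topic Kafka topic
        log.info("handling /demo request");
        try {
            throw new IllegalStateException("demo failure");
        } catch (IllegalStateException e) {
            // ERROR events additionally match WARN_ROLLING_FILE and ERROR_ROLLING_FILE; %rEx renders the stack trace
            log.error("demo error", e);
        }
        return "ok";
    }
}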

Notes:
timeout.ms is how long the Kafka appender waits before throwing an exception once Kafka is unreachable; that exception is what triggers the Failover mechanism. The default is 30 s, and the current logging call is blocked until the exception is returned, so it makes sense to set this to a smaller value.

<JsonLayout compact="true" eventEol="true"/>

can format the output as JSON, but it takes the place of the PatternLayout and makes the fields hard to customize, which is why this configuration builds its own JSON-shaped pattern (FILE_PATTERN) instead.
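
To verify that log events actually arrive in the topic, the kafka-clients dependency already on the classpath is enough for a quick check. A minimal consumer sketch (the group id is made up and the broker address just mirrors KAFKA_SERVERS; adjust both to your environment):

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class LogTopicChecker {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "10.210.100.17:9092"); // same broker as KAFKA_SERVERS
        props.put("group.id", "log-topic-checker");           // assumed group id, pick any unused one
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", "earliest");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("log-topic"));
            // Print whatever JSON log lines the appender has published
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value());
                }
            }
        }
    }
}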
