一、添加需要的jar包
去掉springboot项目自带的日志jar包(spring-boot-starter-logging),添加kafka和log4j2的jar包
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-log4j2</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
二、项目配置文件(application.properties)添加log4j配置文件
logging.config = classpath:log4j-spring-kafka.xml
三、log4j2配置文件(log4j-spring-kafka.xml)
<?xml version="1.0" encoding="UTF-8"?>
<!-- The "status" attribute on Configuration controls Log4j2's own internal diagnostics.
     It may be omitted; set it to "trace" to see detailed internal output, or to OFF (silent) / Error (errors only). -->
<Configuration status="OFF">
<properties>
<!-- NOTE(review): 0.0.0.0 is a bind address, not a connect target; replace with the real broker host:port before use. -->
<property name="kafka_bootstrap_servers">0.0.0.0:9092</property>
<property name="kafka_retries">3</property>
<property name="kafka_linger_ms">1000</property> <!-- raise linger.ms to batch more records per request and reduce request count -->
<property name="kafka_batch_size">2048</property> <!-- producer batch size in bytes -->
<property name="kafka_buffer_memory">10485760</property> <!-- total memory the producer may use for buffering records -->
</properties>
<Appenders>
<!-- Console appender -->
<Console name="console" target="SYSTEM_OUT">
<!-- Console accepts events at DEBUG and above (onMatch); everything else is rejected (onMismatch) -->
<ThresholdFilter level="DEBUG" onMatch="ACCEPT" onMismatch="DENY" />
<!-- Output pattern for log lines -->
<PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss SSS} [%t] %-5level %logger{36} - %msg%n" />
</Console>
<!-- syncSend=false returns immediately after handing the record to the producer;
     true blocks until Kafka acknowledges the send -->
<Kafka name="buried" topic="buried" syncSend="false">
<Filters>
<!-- only events carrying the "buried" marker AND at level INFO or above reach the "buried" topic -->
<MarkerFilter marker="buried" onMatch="ACCEPT" onMismatch="DENY"/>
<ThresholdFilter level="INFO" onMatch="ACCEPT" onMismatch="DENY" />
</Filters>
<PatternLayout charset="UTF-8" pattern="%d{yyyy-MM-dd HH:mm:ss SSS} [%t] %-5level %logger{36} %marker - %msg%n"/>
<!-- producer properties, resolved from the <properties> block above -->
<Property name="bootstrap.servers" value="${kafka_bootstrap_servers}"/>
<Property name="retries" value="${kafka_retries}" />
<Property name="linger.ms" value="${kafka_linger_ms}" />
<Property name="batch.size" value="${kafka_batch_size}" />
<Property name="buffer.memory" value="${kafka_buffer_memory}" />
</Kafka>
<!-- same setup as "buried", but routed by the "monitor" marker to the "monitor" topic -->
<Kafka name="monitor" topic="monitor" syncSend="false">
<Filters>
<MarkerFilter marker="monitor" onMatch="ACCEPT" onMismatch="DENY"/>
<ThresholdFilter level="INFO" onMatch="ACCEPT" onMismatch="DENY" />
</Filters>
<PatternLayout charset="UTF-8" pattern="%d{yyyy-MM-dd HH:mm:ss SSS} [%t] %-5level %logger{36} %marker - %msg%n"/>
<Property name="bootstrap.servers" value="${kafka_bootstrap_servers}"/>
<Property name="retries" value="${kafka_retries}" />
<Property name="linger.ms" value="${kafka_linger_ms}" />
<Property name="batch.size" value="${kafka_batch_size}" />
<Property name="buffer.memory" value="${kafka_buffer_memory}" />
</Kafka>
<!-- wraps both Kafka appenders so sends happen off the application thread -->
<Async name="async">
<AppenderRef ref="buried"/>
<AppenderRef ref="monitor"/>
</Async>
</Appenders>
<Loggers>
<!-- name is required and must be unique -->
<!-- level sets the minimum emitted level; default is error -->
<!-- additivity: whether events also propagate to the parent logger; default true -->
<Logger name="com.start" level="trace" additivity="true">
<AppenderRef ref="async" />
</Logger>
<!-- exactly one Root logger is required; here it only writes to the console -->
<Root level="debug">
<AppenderRef ref="console" />
</Root>
</Loggers>
</Configuration>
四、写日志
// Log4j2 logger; events from this class are routed by log4j-spring-kafka.xml.
protected static final Logger LOGGER = LogManager.getLogger(FlumeStartApplication.class);
// Marker matched by the "buried" MarkerFilter: INFO+ events carrying it go to the Kafka "buried" topic.
public static final Marker BURIED = MarkerManager.getMarker("buried");
// Marker matched by the "monitor" MarkerFilter: INFO+ events carrying it go to the Kafka "monitor" topic.
public static final Marker MONITOR = MarkerManager.getMarker("monitor");
/**
 * Boots the Spring application, then emits a demo log event every 2 seconds,
 * cycling between the "buried" and "monitor" Kafka topics via Log4j2 markers.
 * The loop runs until the main thread is interrupted.
 */
public static void main(String[] args) {
    SpringApplication.run(FlumeStartApplication.class, args);
    int i = 0;
    while (true) {
        i++;
        if (i % 3 == 0) {
            // INFO + "buried" marker: passes both MarkerFilter and ThresholdFilter -> "buried" topic
            LOGGER.info(BURIED, "BURIED埋点消息{}", System.currentTimeMillis() / 1000);
        } else if (i % 3 == 1) {
            // DEBUG is below the Kafka appender's INFO threshold, so the appender drops this
            // (the console appender still shows it)
            LOGGER.debug(BURIED, "BURIED埋点消息{}", System.currentTimeMillis() / 1000);
        } else {
            // INFO + "monitor" marker -> "monitor" topic
            LOGGER.info(MONITOR, "MONITOR监控消息{}", System.currentTimeMillis() / 1000);
        }
        try {
            Thread.sleep(2000);
            System.out.println("睡眠一会儿=======");
        } catch (InterruptedException e) {
            // Fix: the original swallowed the interrupt (printStackTrace + TODO), clearing the
            // thread's interrupt status and making the loop unstoppable via interruption.
            // Restore the flag and exit the loop instead.
            Thread.currentThread().interrupt();
            break;
        }
    }
}