<!-- NOTE(review): the three ch.qos.logback artifacts declare no <version>, so
     they must be version-managed by a parent POM or a dependencyManagement/BOM
     (e.g. spring-boot-starter-parent); otherwise the build fails. TODO confirm
     the parent actually provides these versions. -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
</dependency>
<!-- NOTE(review): logback-access provides servlet-container access logging;
     the logback.xml below does not reference it — presumably included for
     completeness. Verify it is actually needed. -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-access</artifactId>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</dependency>
<!-- Appender that ships logback events to a Kafka topic; used by the
     "kafkaAppender" in the logback.xml below. 0.2.0-RC1 is a release
     candidate, not a final release. -->
<dependency>
<groupId>com.github.danielwegener</groupId>
<artifactId>logback-kafka-appender</artifactId>
<version>0.2.0-RC1</version>
</dependency>
<!-- Logstash-format JSON encoder; used as the kafkaAppender's <encoder>. -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>5.0</version>
</dependency>
以上为需要添加到 pom.xml 中的依赖配置。
在 resources 目录下新建 logback.xml，内容为：
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <!-- Relative path: log files are written under the process working directory. -->
    <property name="log.directory" value="./"/>
    <property name="logName" value="lnyd"/>

    <!-- Console output.
         Pattern tokens: %date: timestamp; %thread: thread name;
         %-5level: level left-padded to 5 chars; %logger: logger name;
         %msg: log message; %n: newline. -->
    <appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
            <pattern>
                %date{yyyy-MM-dd HH:mm:ss} | %highlight(%-5level) | %boldYellow(%thread) | %boldGreen(%logger) | %msg%n
            </pattern>
        </encoder>
    </appender>

    <!-- DEBUG-only file: LevelFilter accepts exactly DEBUG, denies everything else. -->
    <appender name="debug" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${log.directory}/logs/${logName}.debug.log</file>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>DEBUG</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{40} - %msg%n</pattern>
        </encoder>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${log.directory}/logs/${logName}.debug.%d{yyyy-MM-dd}.log</fileNamePattern>
            <!-- FIX: the original had no maxHistory here, so rolled debug logs
                 were kept forever. Cap at 30 days, consistent with the info
                 and error appenders. -->
            <maxHistory>30</maxHistory>
        </rollingPolicy>
    </appender>

    <!-- INFO/WARN file.
         FIX: the original single LevelFilter(INFO) silently dropped WARN events
         from every file (WARN only reached the console, since the debug and
         error appenders deny it too). The chain below denies ERROR (which has
         its own file) and then accepts INFO and above, so both INFO and WARN
         land in this file. -->
    <appender name="info" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${log.directory}/logs/${logName}.info.log</file>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>DENY</onMatch>
            <onMismatch>NEUTRAL</onMismatch>
        </filter>
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${log.directory}/logs/${logName}.info.%d{yyyy-MM-dd}.log</fileNamePattern>
            <maxHistory>30</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{40} - %msg%n</pattern>
        </encoder>
    </appender>

    <!-- ERROR-only file. -->
    <appender name="error" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${log.directory}/logs/${logName}.error.log</file>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${log.directory}/logs/${logName}.error.%d{yyyy-MM-dd}.log</fileNamePattern>
            <maxHistory>30</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{40} - %msg%n</pattern>
        </encoder>
    </appender>

    <!-- Per-package log levels. -->
    <logger name="org.springframework" level="INFO"/>
    <logger name="com.linktech" level="DEBUG"/>
    <logger name="com.mchange.v2.c3p0.impl.C3P0PooledConnectionPool" level="INFO"/>
    <logger name="com.mchange.v2.resourcepool.BasicResourcePool" level="INFO"/>

    <!-- additivity="false": use only this logger's appender-refs;
         additivity="true" (the default): events also propagate to the root
         logger's appenders. -->
    <logger name="hk.linktech.b2b.EventBusDemo" level="INFO" additivity="false">
        <appender-ref ref="stdout"/>
        <appender-ref ref="info"/>
        <appender-ref ref="error"/>
    </logger>

    <!-- Ships JSON-encoded events to Kafka for the ELK pipeline. -->
    <appender name="kafkaAppender" class="com.github.danielwegener.logback.kafka.KafkaAppender">
        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
            <customFields>{"appname":"webdemo"}</customFields>
            <includeMdc>true</includeMdc>
            <includeContext>true</includeContext>
            <throwableConverter class="net.logstash.logback.stacktrace.ShortenedThrowableConverter">
                <maxDepthPerThrowable>30</maxDepthPerThrowable>
                <rootCauseFirst>true</rootCauseFirst>
            </throwableConverter>
        </encoder>
        <topic>my-log-topics</topic>
        <keyingStrategy class="com.github.danielwegener.logback.kafka.keying.HostNameKeyingStrategy"/>
        <!-- WARNING(review): BlockingDeliveryStrategy blocks the logging thread
             when Kafka is slow or unreachable, which can stall the application.
             The appender's documentation recommends AsynchronousDeliveryStrategy
             for most setups (commented alternative below). -->
        <deliveryStrategy class="com.github.danielwegener.logback.kafka.delivery.BlockingDeliveryStrategy"/>
        <!--<deliveryStrategy class="com.github.danielwegener.logback.kafka.delivery.AsynchronousDeliveryStrategy"/>-->
        <!--<producerConfig>bootstrap.servers=39.98.175.140:9092</producerConfig>-->
        <producerConfig>bootstrap.servers=192.168.88.99:9092</producerConfig>
        <producerConfig>acks=all</producerConfig>
        <producerConfig>batch.size=16384</producerConfig>
        <producerConfig>delivery.timeout.ms=60000</producerConfig>
        <!--<producerConfig>linger.ms=1</producerConfig>-->
        <producerConfig>buffer.memory=33554432</producerConfig>
    </appender>

    <!-- additivity defaults to true, so events logged via the "elk" logger go
         to Kafka AND to the root logger's appenders. -->
    <logger name="elk">
        <appender-ref ref="kafkaAppender"/>
    </logger>

    <!-- Root at DEBUG is verbose; consider level="INFO" in production. -->
    <root level="DEBUG">
        <appender-ref ref="stdout"/>
        <appender-ref ref="info"/>
        <appender-ref ref="debug"/>
        <appender-ref ref="error"/>
    </root>
    <!--<root level="INFO">-->
    <!--<appender-ref ref="stdout"/>-->
    <!--<appender-ref ref="info"/>-->
    <!--<appender-ref ref="debug"/>-->
    <!--<appender-ref ref="error"/>-->
    <!--</root>-->
</configuration>
至此，Java 代码中使用 Kafka 向 ELK 发送日志的配置就完成了。