小编是个新人，如有不足之处还请见谅。
首先构建一个 Spring Boot 项目，构建过程这里就不过多介绍了。
- 1 导入依赖
<dependency>
<groupId>com.teambytes.logback</groupId>
<artifactId>logback-flume-appender_2.10</artifactId>
<version>0.0.9</version>
</dependency>
-
2 创建一个logback-spring.xml文件
-
3 配置文件中写入
<configuration>
<appender name="consoleAppender"
class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{yyy MMM dd HH:mm:ss.SSS} [%thread] %-5level %logger{36}:%L- %msg%n
</pattern>
</encoder>
</appender>
<appender name="fileAppender"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 把日志文件输出到:项目启动的目录下的log文件夹(无则自动创建)下 -->
<file>log/logFile.log</file>
<!-- 把日志文件输出到:name为logFilePositionDir的property标签指定的位置下 -->
<!-- <file>${logFilePositionDir}/logFile.log</file> -->
<!-- 把日志文件输出到:当前磁盘下的log文件夹(无则自动创建)下 -->
<!-- <file>/log/logFile.log</file> -->
<rollingPolicy
class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- TimeBasedRollingPolicy策略会将过时的日志,另存到指定的文件中(无该文件则创建) -->
<!-- 把因为 过时 或 过大 而拆分后的文件也保存到项目启动的目录下的log文件夹下 -->
<fileNamePattern>log/logFile.%d{yyyy-MM-dd}.%i.log
</fileNamePattern>
<!-- 设置过时时间(单位:<fileNamePattern>标签中%d里最小的时间单位) -->
<!-- 系统会删除(分离出去了的)过时了的日志文件 -->
<!-- 本人这里:保存以最后一次日志为准,往前7天以内的日志文件 -->
<MaxHistory>
7
</MaxHistory>
<!-- 滚动策略可以嵌套;
这里嵌套了一个SizeAndTimeBasedFNATP策略,
主要目的是: 在每天都会拆分日志的前提下,
当该天的日志大于规定大小时,
也进行拆分并以【%i】进行区分,i从0开始
-->
<timeBasedFileNamingAndTriggeringPolicy
class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>5MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
</rollingPolicy>
<encoder>
<pattern>%d{yyy MMM dd HH:mm:ss.SSS} [%thread] %-5level %logger{36}:%L- %msg%n
</pattern>
</encoder>
</appender>
<appender name="flumeTest" class="com.teambytes.logback.flume.FlumeLogstashV1Appender">
<!--这里要写自己主机的IP 或者 hostname(要做IP映射)-->
<flumeAgents>
SparkYarn:44444
</flumeAgents>
<flumeProperties>
connect-timeout=4000;
request-timeout=8000
</flumeProperties>
<batchSize>100</batchSize>
<reportingWindow>1000</reportingWindow>
<additionalAvroHeaders>
myHeader = myValue
</additionalAvroHeaders>
<application>JustryDeng's Application</application>
<layout class="ch.qos.logback.classic.PatternLayout">
<pattern>%d{HH:mm:ss.SSS} %-5level %logger{36} - \(%file:%line\) - %message%n%ex</pattern>
</layout>
</appender>
<logger name="com" level="info">
<appender-ref ref="flumeTest"/>
</logger>
<root level="info">
<appender-ref ref="consoleAppender"/>
</root>
</configuration>
- 4 代码
package com.lbzheng.demo;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@SpringBootTest
public class LastprojectApplicationTests {

    /** SLF4J logger for this class; messages are routed to the flume appender via logback-spring.xml. */
    private static final Logger LOGGER = LoggerFactory.getLogger(LastprojectApplicationTests.class);

    public static void main(String[] args) {
        SpringApplication.run(LastprojectApplicationTests.class, args);
    }

    /**
     * Emits ten log messages so they flow through the flume appender to Kafka,
     * then sleeps briefly so the asynchronous appender can flush before the JVM exits.
     */
    @Test
    public void contextLoads() {
        for (int i = 0; i < 10; i++) {
            // Parameterized logging: no string concatenation when the level is disabled.
            LOGGER.info("我是第{}条信息", i);
        }
        try {
            // Give the asynchronous flume appender time to deliver the batch.
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing the interruption.
            Thread.currentThread().interrupt();
        }
    }
}
服务器端
安装 flume
- 1 tar包安装
tar -zxf apache-flume-1.9.0-bin.tar.gz
- 2 创建一个 properties 文件
[root@CentOS apache-flume-1.9.0-bin]# vi conf/demo01.properties
# 声明组件信息
a1.sources = s1
a1.sinks = sk1
a1.channels = c1
# 组件配置
a1.sources.s1.type = avro
#这里是flume的工作节点
a1.sources.s1.bind = SparkYarn
a1.sources.s1.port = 44444
a1.channels.c1.type = memory
a1.sinks.sk1.type = org.apache.flume.sink.kafka.KafkaSink
#这里需要kafka的主机ip
a1.sinks.sk1.kafka.bootstrap.servers = SparkYarn:9092
a1.sinks.sk1.kafka.topic = topic01
a1.sinks.sk1.kafka.flumeBatchSize = 20
a1.sinks.sk1.kafka.producer.acks = 1
a1.sinks.sk1.kafka.producer.linger.ms = 1
# 链接组件
a1.sources.s1.channels = c1
a1.sinks.sk1.channel = c1
- 3 启动
[root@CentOS apache-flume-1.9.0-bin]# ./bin/flume-ng agent --conf conf/ --conf-file conf/demo01.properties --name a1
- 4 kafka节点
[root@SparkYarn kafka_2.11-2.2.0]# ./bin/kafka-console-consumer.sh --bootstrap-server SparkYarn:9092 --topic topic01
结果
21:18:36.881 INFO c.l.demo.LastprojectApplicationTests - (LastprojectApplicationTests.java:23) - 我是第0条信息
21:18:36.883 INFO c.l.demo.LastprojectApplicationTests - (LastprojectApplicationTests.java:23) - 我是第1条信息
21:18:36.884 INFO c.l.demo.LastprojectApplicationTests - (LastprojectApplicationTests.java:23) - 我是第2条信息
21:18:36.885 INFO c.l.demo.LastprojectApplicationTests - (LastprojectApplicationTests.java:23) - 我是第3条信息
21:18:36.885 INFO c.l.demo.LastprojectApplicationTests - (LastprojectApplicationTests.java:23) - 我是第4条信息
21:18:36.886 INFO c.l.demo.LastprojectApplicationTests - (LastprojectApplicationTests.java:23) - 我是第5条信息
21:18:36.886 INFO c.l.demo.LastprojectApplicationTests - (LastprojectApplicationTests.java:23) - 我是第6条信息
21:18:36.887 INFO c.l.demo.LastprojectApplicationTests - (LastprojectApplicationTests.java:23) - 我是第7条信息
21:18:36.888 INFO c.l.demo.LastprojectApplicationTests - (LastprojectApplicationTests.java:23) - 我是第8条信息
21:18:36.889 INFO c.l.demo.LastprojectApplicationTests - (LastprojectApplicationTests.java:23) - 我是第9条信息