应该场景为监控上传的日志文件目录,日志文件分为sdk和api两种Json格式文件,Flume采用断点续传,对两个类型文件目录进行监控,对类型进行区分,并过滤Json格式不合法的日志,最后发送到Kafka对应类型的Topic。
平台:CDH6.2,Flume1.9
配置
CDH -> Flume -> 实例 -> 选择节点 -> 配置
配置
a1.sources = r1
a1.channels = c1 c2
a1.sinks = k1 k2

# ---- source ----
a1.sources.r1.type = TAILDIR
# Checkpoint file for resumable tailing; create the parent directory and the
# file beforehand, and make sure the Flume user has read/write permission.
a1.sources.r1.positionFile = /data3/flume/log_position.json
# Two filegroups so one source can monitor both log directories.
a1.sources.r1.filegroups = f1 f2
a1.sources.r1.filegroups.f1 = /data3/cp-api/logs/.*
a1.sources.r1.filegroups.f2 = /data3/cp-sdk/logs/.*
a1.sources.r1.fileHeader = true
a1.sources.r1.channels = c1 c2

# ---- interceptors ----
# i1 drops events whose body is not structurally valid JSON; i2 writes the
# log type into the "topic" header. These MUST be enabled: the multiplexing
# selector below routes on the "topic" header, and without i2 that header is
# never set, so no event would match any mapping.
a1.sources.r1.interceptors = i1 i2
a1.sources.r1.interceptors.i1.type = com.sm.flume.interceptor.LogETLInterceptor$Builder
a1.sources.r1.interceptors.i2.type = com.sm.flume.interceptor.LogTypeInterceptor$Builder

# ---- selector ----
# Route to c1/c2 based on the "topic" header set by i2. The mapping keys must
# be the exact header values LogTypeInterceptor writes:
# "topic_cp_api" and "topic_cp_sdk".
a1.sources.r1.selector.type = multiplexing
a1.sources.r1.selector.header = topic
a1.sources.r1.selector.mapping.topic_cp_api = c1
a1.sources.r1.selector.mapping.topic_cp_sdk = c2

# ---- channels ----
a1.channels.c1.type = memory
a1.channels.c1.capacity = 10000
a1.channels.c1.byteCapacityBufferPercentage = 20
a1.channels.c2.type = memory
a1.channels.c2.capacity = 10000
a1.channels.c2.byteCapacityBufferPercentage = 20

# ---- api sink ----
a1.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
a1.sinks.k1.kafka.topic = topic_api
a1.sinks.k1.kafka.bootstrap.servers = cdh-master:9092,cdh-slave01:9092,cdh-slave02:9092
a1.sinks.k1.kafka.flumeBatchSize = 2000
a1.sinks.k1.kafka.producer.acks = 1
a1.sinks.k1.channel = c1

# ---- sdk sink ----
a1.sinks.k2.type = org.apache.flume.sink.kafka.KafkaSink
a1.sinks.k2.kafka.topic = topic_sdk
a1.sinks.k2.kafka.bootstrap.servers = cdh-master:9092,cdh-slave01:9092,cdh-slave02:9092
a1.sinks.k2.kafka.flumeBatchSize = 2000
a1.sinks.k2.kafka.producer.acks = 1
a1.sinks.k2.channel = c2
拦截器
maven项目
com.sm.flume.interceptor
pom依赖
<dependencies>
    <dependency>
        <groupId>org.apache.flume</groupId>
        <artifactId>flume-ng-core</artifactId>
        <version>1.9.0</version>
        <!-- Flume already ships this jar in its own lib directory, so it must
             not be bundled into the interceptor jar: mark it provided. -->
        <scope>provided</scope>
    </dependency>
</dependencies>
<build>
    <plugins>
        <plugin>
            <artifactId>maven-compiler-plugin</artifactId>
            <!-- 2.3.2 predates Java 8; 3.8.x properly supports 1.8. -->
            <version>3.8.1</version>
            <configuration>
                <source>1.8</source>
                <target>1.8</target>
                <encoding>UTF-8</encoding>
            </configuration>
        </plugin>
        <plugin>
            <artifactId>maven-assembly-plugin</artifactId>
            <!-- Pin the plugin version so builds are reproducible. -->
            <version>3.3.0</version>
            <configuration>
                <descriptorRefs>
                    <descriptorRef>jar-with-dependencies</descriptorRef>
                </descriptorRefs>
            </configuration>
            <executions>
                <execution>
                    <id>make-assembly</id>
                    <phase>package</phase>
                    <goals>
                        <goal>single</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>
LogETLInterceptor
过滤Json格式不合法的日志的拦截器类。
/**
 * Flume interceptor that drops events whose body is not structurally valid
 * JSON (as judged by {@link LogUtils#validate(String)}).
 */
public class LogETLInterceptor implements Interceptor {

    @Override
    public void initialize() {
        // No state to set up.
    }

    /**
     * Validates a single event.
     *
     * @param event incoming Flume event; its body is decoded as UTF-8
     * @return the event unchanged if the body passes validation, or
     *         {@code null} to signal that the event should be dropped
     */
    @Override
    public Event intercept(Event event) {
        String log = new String(event.getBody(), java.nio.charset.StandardCharsets.UTF_8);
        return LogUtils.validate(log) ? event : null;
    }

    /**
     * Filters a batch, keeping only events that survive
     * {@link #intercept(Event)}.
     */
    @Override
    public List<Event> intercept(List<Event> events) {
        List<Event> kept = new ArrayList<>(events.size());
        for (Event event : events) {
            Event result = intercept(event);
            if (result != null) {
                // Add the value returned by the single-event path, not the
                // loop variable, so any future per-event transformation is
                // not silently discarded.
                kept.add(result);
            }
        }
        return kept;
    }

    @Override
    public void close() {
        // No resources to release.
    }

    /** Factory Flume uses to build this interceptor from the agent config. */
    public static class Builder implements Interceptor.Builder {
        @Override
        public Interceptor build() {
            return new LogETLInterceptor();
        }

        @Override
        public void configure(Context context) {
            // No configurable options.
        }
    }
}
LogTypeInterceptor
日志类型区分的类。
/**
 * Flume interceptor that tags each event with a "topic" header derived from
 * its body, so the multiplexing channel selector can route api and sdk logs
 * to different channels (and ultimately different Kafka topics).
 */
public class LogTypeInterceptor implements Interceptor {

    @Override
    public void initialize() {
        // Nothing to initialize.
    }

    /**
     * Inspects the UTF-8 body and stores the matching topic name in the
     * event's "topic" header.
     *
     * @param event incoming Flume event
     * @return the same event, with its "topic" header set
     */
    @Override
    public Event intercept(Event event) {
        String body = new String(event.getBody(), java.nio.charset.StandardCharsets.UTF_8);
        // Bodies containing "cp_api" are api logs; everything else is sdk.
        String topic = body.contains("cp_api") ? "topic_cp_api" : "topic_cp_sdk";
        event.getHeaders().put("topic", topic);
        return event;
    }

    /** Tags every event in the batch via {@link #intercept(Event)}. */
    @Override
    public List<Event> intercept(List<Event> events) {
        List<Event> tagged = new ArrayList<>(events.size());
        for (Event e : events) {
            tagged.add(intercept(e));
        }
        return tagged;
    }

    @Override
    public void close() {
        // No resources to release.
    }

    /** Factory Flume uses to build this interceptor from the agent config. */
    public static class Builder implements Interceptor.Builder {
        @Override
        public Interceptor build() {
            return new LogTypeInterceptor();
        }

        @Override
        public void configure(Context context) {
            // No configurable options.
        }
    }
}
工具类
// json
// [{"cp_game_id":2,"event":{"event_name":"money_Flow","event_time":"1567435887512"},
// "data":{"after_count":604,"reason":"战斗消耗","role":"法师","item_id":"装备","after_money":90627,
// "count":67, "combat":99342,"money_type":"金币", "sub_reason":"战斗药水消耗","role_name":"剁",
// "role_vip":8,"school":"唐门"}}]
/** Cheap structural validation helpers for JSON log lines. */
public class LogUtils {

    private LogUtils() {
        // Utility class — no instances.
    }

    /**
     * Shallow sanity check that a log line looks like a JSON value.
     *
     * <p>Accepts both a JSON object ({@code {...}}) and a JSON array
     * ({@code [...]}) — the documented sample log above is emitted as a
     * single-element array, which the object-only check would wrongly drop.
     * This intentionally checks only the outer brackets; it is not a full
     * JSON parse.
     *
     * @param log raw event body, may be {@code null}
     * @return {@code true} if the trimmed text has matching outer brackets
     */
    public static boolean validate(String log) {
        if (log == null) {
            return false;
        }
        String trimmed = log.trim();
        if (trimmed.isEmpty()) {
            return false;
        }
        // Object form: {...}
        if (trimmed.startsWith("{") && trimmed.endsWith("}")) {
            return true;
        }
        // Array form: [...] — matches the documented sample log format.
        return trimmed.startsWith("[") && trimmed.endsWith("]");
    }
}
打包,上传
没有依赖的jar即可,Flume的lib目录下有依赖包。
将flumeintercepter-1.0-SNAPSHOT.jar上传到服务器各节点
/opt/cloudera/parcels/CDH-6.2.0-1.cdh6.2.0.p0.967373/lib/flume-ng/lib/
重启Flume。