配置:file-flume-kafka.conf
flume中主要用到的就是配置文件
com.bigdata.flume.interceptor.LogETLInterceptor和com.bigdata.flume.interceptor.LogTypeInterceptor是自定义的拦截器的全类名。需要根据用户自定义的拦截器做相应修改。
拦截器:它的里面有两个自定义的拦截器,一个是数据清洗的拦截器,一个是日志类型的拦截器。
拦截器就相当于map,有kv值。
key:就是头,给他个名字叫做 topic;value:里面放着日志类型 start 和 event,发往 channel。
# Agent a1: one TAILDIR source fanning out into two Kafka channels.
# NOTE: Flume configs are java.util.Properties files, which have NO inline
# comment syntax -- a trailing "# ..." on a value line becomes part of the
# value. All comments therefore live on their own lines.
a1.sources = r1
a1.channels = c1 c2

# configure source: TAILDIR tails the app log files
a1.sources.r1.type = TAILDIR
# position file records read offsets so tailing resumes after a restart
a1.sources.r1.positionFile = /opt/module/flume/test/log_position.json
a1.sources.r1.filegroups = f1
# files to tail (regex on the filename)
a1.sources.r1.filegroups.f1 = /tmp/logs/app.+
a1.sources.r1.fileHeader = true
a1.sources.r1.channels = c1 c2

# interceptors: i1 = ETL (data-cleaning) interceptor, i2 = log-type interceptor
a1.sources.r1.interceptors = i1 i2
a1.sources.r1.interceptors.i1.type = com.bigdata.flume.interceptor.LogETLInterceptor$Builder
a1.sources.r1.interceptors.i2.type = com.bigdata.flume.interceptor.LogTypeInterceptor$Builder

# multiplexing selector: route events by the "topic" header set by i2
a1.sources.r1.selector.type = multiplexing
a1.sources.r1.selector.header = topic
a1.sources.r1.selector.mapping.topic_start = c1
a1.sources.r1.selector.mapping.topic_event = c2

# configure channels: KafkaChannel writes straight to Kafka (no sink needed)
a1.channels.c1.type = org.apache.flume.channel.kafka.KafkaChannel
a1.channels.c1.kafka.bootstrap.servers = bigdata02:9092,bigdata03:9092,bigdata04:9092
# start-type logs go to topic_start
a1.channels.c1.kafka.topic = topic_start
a1.channels.c1.parseAsFlumeEvent = false
a1.channels.c1.kafka.consumer.group.id = flume-consumer

a1.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
a1.channels.c2.kafka.bootstrap.servers = bigdata02:9092,bigdata03:9092,bigdata04:9092
# event-type logs go to topic_event
a1.channels.c2.kafka.topic = topic_event
a1.channels.c2.parseAsFlumeEvent = false
a1.channels.c2.kafka.consumer.group.id = flume-consumer
代码思路:
1.创建一个类,实现接口(implements)叫Interceptor;
2.需要实现4个方法
initialize() 初始化
Event intercept(Event event) 单个事件进行过滤
List<Event> intercept(List<Event> events) 多个事件进行过滤
close() 关闭
/**
 * Flume ETL interceptor: drops malformed log events before they reach the
 * channel selector. Events whose body contains "start" are validated as
 * start logs via LogUtils.validateStart; all other bodies are validated as
 * event logs via LogUtils.validateEvent. Invalid events are discarded.
 */
public class LogETLInterceptor implements Interceptor {

    @Override
    public void initialize() {
        // No setup required.
    }

    /**
     * Validates a single event.
     *
     * @param event the incoming Flume event; its body is decoded as UTF-8
     * @return the event unchanged if it passes validation, otherwise null
     *         (Flume drops null-returning events)
     */
    @Override
    public Event intercept(Event event) {
        String line = new String(event.getBody(), Charset.forName("UTF-8"));
        // Dispatch to the matching validator based on the body content.
        boolean valid = line.contains("start")
                ? LogUtils.validateStart(line)
                : LogUtils.validateEvent(line);
        return valid ? event : null;
    }

    /**
     * Filters a batch by applying the single-event check to each element;
     * events failing validation are omitted from the returned list.
     *
     * @param events the incoming batch
     * @return a new list containing only the events that passed validation
     */
    @Override
    public List<Event> intercept(List<Event> events) {
        ArrayList<Event> kept = new ArrayList<>();
        for (Event candidate : events) {
            Event checked = intercept(candidate);
            if (checked != null) {
                kept.add(checked);
            }
        }
        return kept;
    }

    @Override
    public void close() {
        // Nothing to release.
    }

    /** Builder referenced by its fully-qualified name in the Flume config. */
    public static class Builder implements Interceptor.Builder {

        @Override
        public Interceptor build() {
            return new LogETLInterceptor();
        }

        @Override
        public void configure(Context context) {
            // This interceptor takes no configuration properties.
        }
    }
}