问题
flume采集日志用kafka来广播消息
flume的配置
[root@SZB-L0032016 bin]# cat ../conf/flume_kafka.conf
# Agent "a": two exec sources -> one durable file channel -> one Kafka sink.
a.sinks=k1
a.sources=s1 s2
a.channels=r1
# Sources: tail two log files. NOTE: the exec source gives no delivery
# guarantee if the tail process dies; taildir source is more robust.
a.sources.s1.type=exec
a.sources.s1.command=tail -F /root/a.log
a.sources.s2.type=exec
a.sources.s2.command=tail -F /root/b.log
# Sink: Kafka topic "test", broker 10.20.25.199:9092
a.sinks.k1.type=org.apache.flume.sink.kafka.KafkaSink
a.sinks.k1.kafka.topic = test
a.sinks.k1.kafka.bootstrap.servers =10.20.25.199:9092
a.sinks.k1.kafka.flumeBatchSize = 20
a.sinks.k1.kafka.producer.acks = 1
a.sinks.k1.kafka.producer.linger.ms = 1
# FIX: key was misspelled "a.sinks.ki...." (ki vs k1), so the compression
# setting was silently ignored by Flume; corrected to sink name k1.
a.sinks.k1.kafka.producer.compression.type = snappy
# Channel: file channel persists events across agent restarts.
a.channels.r1.type=file
a.channels.r1.checkpointDir=/root/flume/checkpoint
a.channels.r1.dataDirs=/root/flume/data
# Wiring: both sources feed channel r1; sink k1 drains it.
a.sources.s2.channels=r1
a.sources.s1.channels=r1
a.sinks.k1.channel=r1
启动flume服务
[root@SZB-L0032016 bin]# ./flume-ng agent --conf conf --conf-file ../conf/flume_kafka.conf --name a -Dflume.root.logger=INFO,console
往flume的source里面增加内容
[root@SZB-L0032016 ~]# echo "test">>a.log
[root@SZB-L0032016 ~]# echo "test11111">>a.log
启动kafka的消费端
[xulu@SZB-L0032015 bin]$ ./kafka-console-consumer.sh --zookeeper 10.20.25.241:3181 --topic test --from-beginning
this is message
this is aaa
clearrsss
this is the kafka info
1111dddddddd
test big data
test
test11111
可以在kafka消费端查看到flume发送过来的消息