#两个channels 和两个 sink
agent.sources = s
agent.channels = c c1
agent.sinks = r k
#这就是source 把源数据打向两个channels
agent.sources.s.channels = c c1
agent.sources.s.type = exec
agent.sources.s.command = tail -fn 1 /home/hadoop/data/flume-log/flume.log
#下面是两个sink的配置，指向两个不同的topic
agent.sinks.r.type = org.apache.flume.plugins.KafkaSink
agent.sinks.r.metadata.broker.list=192.168.179.3:9092,192.168.179.4:9092,192.168.179.5:9092
agent.sinks.r.serializer.class=kafka.serializer.StringEncoder
agent.sinks.r.request.required.acks=-1
agent.sinks.r.max.message.size=1000000
agent.sinks.r.agent.type=sync
agent.sinks.r.custom.encoding=UTF-8
agent.sinks.r.custom.topic.name=make-flume
agent.sinks.k.type = org.apache.flume.plugins.KafkaSink
agent.sinks.k.metadata.broker.list=192.168.179.3:9092,192.168.179.4:9092,192.168.179.5:9092
agent.sinks.k.serializer.class=kafka.serializer.StringEncoder
agent.sinks.k.request.required.acks=-1
agent.sinks.k.max.message.size=1000000
agent.sinks.k.agent.type=sync
agent.sinks.k.custom.encoding=UTF-8
agent.sinks.k.custom.topic.name=test
#下面是两个不同的channel管道缓存
agent.sinks.r.channel = c
agent.sinks.k.channel = c1
agent.channels.c.type = memory
agent.channels.c.capacity = 1000000
agent.channels.c.transactionCapacity = 1000000
agent.channels.c1.type = memory
agent.channels.c1.capacity = 1000000
agent.channels.c1.transactionCapacity = 1000000