pom.xml — add the Flume log4j-appender client dependencies:
<!-- Log4j appender that forwards logging events to a Flume avro source -->
<dependency>
<groupId>org.apache.flume.flume-ng-clients</groupId>
<artifactId>flume-ng-log4jappender</artifactId>
<version>1.8.0</version>
</dependency>
<!-- Flume core classes required by the appender at runtime (same version as the appender) -->
<dependency>
<groupId>org.apache.flume</groupId>
<artifactId>flume-ng-core</artifactId>
<version>1.8.0</version>
</dependency>
log4j.properties — add the appender below, and reference it from the root logger so it is active (e.g. log4j.rootLogger=INFO,flumeAvro):
# Appender that ships each log4j event to a Flume agent over avro RPC.
log4j.appender.flumeAvro=org.apache.flume.clients.log4jappender.Log4jAppender
# Target agent endpoint — must match the avro source's bind/port in example.conf.
log4j.appender.flumeAvro.Hostname=10.2.82.55
log4j.appender.flumeAvro.Port=6666
# NOTE(review): UnsafeMode=true presumably keeps append failures (agent down /
# unreachable) from propagating to the application — confirm against the
# Log4jAppender documentation for this Flume version.
log4j.appender.flumeAvro.UnsafeMode=true
log4j.appender.flumeAvro.layout=org.apache.log4j.PatternLayout
# %m only: the event body carries the raw log message, no timestamp/level prefix.
log4j.appender.flumeAvro.layout.ConversionPattern=%m
Start the Flume agent (note: `-H`/`-p` are avro-client options, not agent options, and the agent-name flag is `-n`/`--name`):
bin/flume-ng agent -c conf -f conf/example.conf -n a1 -Dflume.root.logger=INFO,console
conf/example.conf:
# example.conf: A single-node Flume configuration.
# Pipeline: avro source (fed by the log4j Log4jAppender) -> memory channel -> Kafka sink.

# Name the components on this agent
a1.sources = r1
a1.channels = c1
a1.sinks = k1

# Alternative source kept for reference: read files dropped into a spool
# directory instead of receiving avro events. Enable by swapping r1 for src-1.
#a1.sources.src-1.type = spooldir
#a1.sources.src-1.channels = c1
#a1.sources.src-1.spoolDir = d:/log/
#a1.sources.src-1.fileHeader = true
#a1.sources.src-1.batchSize = 1

# Describe the source: listen for avro RPC events.
# bind/port must match Hostname/Port in the log4j appender configuration.
a1.sources.r1.type = avro
a1.sources.r1.channels = c1
a1.sources.r1.bind = 10.2.82.55
a1.sources.r1.port = 6666

# Use a channel which buffers events in memory
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100

# Describe the sink: publish events to the Kafka topic "flume".
a1.sinks.k1.channel = c1
a1.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
a1.sinks.k1.kafka.topic = flume
a1.sinks.k1.kafka.bootstrap.servers = 172.17.60.185:9092,172.17.60.193:9092,172.17.60.87:9092
# Events per producer batch; 1 = forward each event immediately (low throughput).
a1.sinks.k1.kafka.flumeBatchSize = 1
# acks=1: wait for the partition leader's acknowledgement only.
a1.sinks.k1.kafka.producer.acks = 1
a1.sinks.k1.kafka.producer.linger.ms = 1
a1.sinks.k1.kafka.producer.compression.type = snappy