1. 安装配置
1.1. 解压安装
# Extract the Flume distribution into /opt and rename it to a version-free path
tar -zxvf apache-flume-1.6.0-bin.tar.gz -C /opt
# BUGFIX: the archive was extracted into /opt, so the rename must use absolute
# paths — the original relative 'mv' only worked when already inside /opt
mv /opt/apache-flume-1.6.0-bin /opt/flume
1.2. 配置环境变量
vim /etc/profile
# Flume installation root
export FLUME_HOME=/opt/flume
# BUGFIX: the original PATH value ended with a trailing ':', which makes the
# shell treat the current directory as part of PATH (a security hazard and a
# source of surprising command resolution) — the trailing colon is removed
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$FLUME_HOME/bin
保存退出后,刷新profile
source /etc/profile
1.3. 验证
#查看flume版本(注意:下面的示例输出来自 Flume 1.8.0 环境;如果按上文安装的是 1.6.0,此处应显示 Flume 1.6.0):
[root@alone opt]# flume-ng version
Flume 1.8.0
Source code repository: https://git-wip-us.apache.org/repos/asf/flume.git
Revision: 99f591994468633fc6f8701c5fc53e0214b6da4f
Compiled by denes on Fri Sep 15 14:58:00 CEST 2017
From source with checksum fbb44c8c8fb63a49be0a59e27316833d
#出现上面的信息,表示安装成功了
2.实时采集案例---参见官网,对每种类型的source、channel、sink有详细的解释和示例
# Agent "agent2": one source fanning out to two channels (c1 -> Kafka sink k1,
# c2 -> HDFS sink k2)
agent2.sources = s1
agent2.channels = c1 c2
agent2.sinks = k1 k2
# Define the source
# Alternative kept for reference: an exec source tailing a local file
#agent2.sources.s1.type=exec
#agent2.sources.s1.command=tail -F /test/data/test.log
#agent2.sources.s1.channels = c1 c2
# Avro RPC source: listens for events sent by Avro clients / upstream agents
agent2.sources.s1.type = avro
agent2.sources.s1.bind=172.16.90.62
agent2.sources.s1.port=4141
# No channel selector is configured, so Flume's default replicating selector
# applies: every event is copied to BOTH c1 and c2
agent2.sources.s1.channels = c1 c2
# Define the channels
# NOTE: memory channels buffer events in the agent's heap — events still in a
# channel are lost if the agent process dies
#------- c1 (buffers events destined for the Kafka sink k1) -------
agent2.channels.c1.type = memory
agent2.channels.c1.capacity = 100000
agent2.channels.c1.transactionCapacity = 100000
#------- c2 (buffers events destined for the HDFS sink k2) -------
agent2.channels.c2.type = memory
agent2.channels.c2.capacity=10000
agent2.channels.c2.transactionCapacity=1000
# Define the sinks
#--------- k1: Kafka sink ---------
agent2.sinks.k1.type=org.apache.flume.sink.kafka.KafkaSink
# NOTE(review): 'brokerList' and 'topic' are the Flume 1.6-era property names;
# Flume 1.7+ documents 'kafka.bootstrap.servers' and 'kafka.topic' instead
# (old names are still accepted for backward compatibility) — confirm against
# the Flume version actually installed before renaming
agent2.sinks.k1.brokerList=app1:6667,app2:6667
agent2.sinks.k1.topic=kafkatest2
# NOTE(review): 'serializer.class' is an old Kafka 0.8 producer property;
# newer Kafka clients ignore it — presumably harmless, verify if upgrading
agent2.sinks.k1.serializer.class=kafka.serializer.StringEncoder
agent2.sinks.k1.channel=c1
#--------- k2: HDFS sink ---------
agent2.sinks.k2.type = hdfs
agent2.sinks.k2.channel = c2
# Output path; the %y-%m-%d/%H%M escapes are filled from the event's
# timestamp header (supplied here by useLocalTimeStamp below)
agent2.sinks.k2.hdfs.path = /flume/events/%y-%m-%d/%H%M/
agent2.sinks.k2.hdfs.filePrefix = events-
agent2.sinks.k2.hdfs.round = true
# Combined with roundUnit, rounds the path timestamp down so a new directory
# is used every 10 minutes
agent2.sinks.k2.hdfs.roundValue = 10
agent2.sinks.k2.hdfs.roundUnit = minute
# Interval (seconds) after which the temporary file is rolled into the final
# target file
agent2.sinks.k2.hdfs.rollInterval = 300
# File size (bytes) that triggers a roll; 0 disables size-based rolling
agent2.sinks.k2.hdfs.rollSize = 0
# BUGFIX: this property was commented out, so Flume's default rollCount = 10
# applied and every 10 events produced a new file, defeating the 300-second
# rollInterval above; 0 disables count-based rolling so only the time
# interval triggers rolls
agent2.sinks.k2.hdfs.rollCount = 0
# Number of events flushed to HDFS per batch
agent2.sinks.k2.hdfs.batchSize = 10
# Use the agent's local time for the %-escapes (incoming events may lack a
# 'timestamp' header)
agent2.sinks.k2.hdfs.useLocalTimeStamp = true
# Output file format: default is SequenceFile; DataStream writes plain text
agent2.sinks.k2.hdfs.fileType = DataStream
Flume除了以上介绍的内容,还支持定义各种拦截器,用来做一些简单的数据清洗,可以自己去官网学习