Flume installation, gcc installation, netcat installation; 1: Flume netcat source to console logger; 2: Flume uploading log files to HDFS

Flume installation

cd /opt/software/flume190/conf
mv flume-env.sh.template flume-env.sh
vim flume-env.sh
export JAVA_HOME=/opt/software/jdk
vim /etc/profile.d/myenv.sh
#flume
export FLUME_HOME=/opt/software/flume190
export PATH=$FLUME_HOME/bin:$PATH
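
To load the new variables and confirm the setup, a quick check (same paths as above):

source /etc/profile.d/myenv.sh
flume-ng version     # should print the installed Flume version, e.g. 1.9.0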

The google guava-xxx.jar shipped with Flume must match the guava version used by Hadoop.

cd /opt/software/flume190/lib
ls|grep guava
rm -rf <the old guava-*.jar bundled with Flume>
cp /opt/software/hadoop313/share/hadoop/common/lib/guava-27.0-jre.jar /opt/software/flume190/lib
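
A quick way to confirm that both sides now carry the same guava version (paths as above):

ls /opt/software/hadoop313/share/hadoop/common/lib | grep guava   # Hadoop's copy
ls /opt/software/flume190/lib | grep guava                        # Flume's copy, should match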

gcc installation

yum -y install gcc

netcat installation

yum install -y nc
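
A minimal sanity check that both tools work (7777 is just an arbitrary throwaway port):

gcc --version                    # confirm gcc is installed
nc -lk 7777                      # terminal 1: listen on the test port
echo hello | nc localhost 7777   # terminal 2: "hello" should appear in terminal 1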

Flume: netcat source to console logger

Write the configuration file: vim /root/flume_job/logconf/flume01.cnf

a1.sources = s1
a1.channels = c1
a1.sinks = k1

#initialize the source
a1.sources.s1.type = netcat
a1.sources.s1.bind = 192.168.75.245
a1.sources.s1.port = 6666

#initialize the channel
a1.channels.c1.type = memory
a1.channels.c1.capacity = 100
a1.channels.c1.transactionCapacity = 10

#initialize the sink
a1.sinks.k1.type = logger

#wire the components together
a1.sources.s1.channels = c1
a1.sinks.k1.channel = c1

Run command

flume-ng agent -n a1 -c /opt/software/flume190/conf/ -f /root/flume_job/logconf/flume01.cnf -Dflume.root.logger=INFO,console
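
With the agent running, it can be tested from a second terminal; the address and port come from the source settings above. Each line typed is printed by the logger sink on the agent console, roughly as Event: { headers:{} body: ... }:

nc 192.168.75.245 6666
hello flume      # type any text and press Enter; it shows up in the agent console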

Flume: uploading log files to HDFS

1. Apply the guava version fix described above

2. Create the configuration file and directories

cd /opt/software/flume190
mkdir mydata
mkdir mydata/data mydata/checkpoint
mkdir flume-conf-files
vim flume-conf-files/flume_spooldir_file_hdfs.cnf
#component declarations
a1.sources = s1
a1.channels = c1
a1.sinks = k1

#initialize the source
a1.sources.s1.type = spooldir
a1.sources.s1.spoolDir = /root/data/flume
a1.sources.s1.ignorePattern = ^(.)*\\.bak$
a1.sources.s1.fileSuffix = .bak

#initialize the channel
a1.channels.c1.type = file
a1.channels.c1.checkpointDir = /opt/software/flume190/mydata/checkpoint
a1.channels.c1.dataDirs = /opt/software/flume190/mydata/data
a1.channels.c1.capacity = 10000
a1.channels.c1.transactionCapacity = 10000

#initialize the sink
a1.sinks.k1.type = hdfs
a1.sinks.k1.hdfs.path = hdfs://192.168.75.245:9820/flume/events/fakeorder/%y-%m-%d/%H
#timestamp rounding: round event times down to 10-minute buckets
a1.sinks.k1.hdfs.round = true
a1.sinks.k1.hdfs.roundValue = 10
a1.sinks.k1.hdfs.roundUnit = minute
a1.sinks.k1.hdfs.filePrefix = log_%Y%m%d_%H
a1.sinks.k1.hdfs.fileSuffix = .log
a1.sinks.k1.hdfs.useLocalTimeStamp = true
a1.sinks.k1.hdfs.writeFormat = Text
a1.sinks.k1.hdfs.rollCount = 0
a1.sinks.k1.hdfs.rollSize = 134217728
a1.sinks.k1.hdfs.rollInterval = 0
a1.sinks.k1.hdfs.batchSize = 1000
a1.sinks.k1.hdfs.threadsPoolSize = 4
a1.sinks.k1.hdfs.idleTimeout = 0
a1.sinks.k1.hdfs.minBlockReplicas = 1

#wire the components together
a1.sources.s1.channels = c1
a1.sinks.k1.channel = c1

3. Create the directory /flume/events/fakeorder in HDFS
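
For example, from the command line (same path as hdfs.path above):

hdfs dfs -mkdir -p /flume/events/fakeorder
hdfs dfs -ls /flume/events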


4. Run command

flume-ng agent -n a1 -c /opt/software/flume190/conf/ -f /opt/software/flume190/flume-conf-files/flume_spooldir_file_hdfs.cnf -Dflume.root.logger=INFO,console
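
With the agent running, dropping a file into the spooling directory triggers the upload; the file name and contents below are only examples:

echo "1001,fakeorder,100.00" > /root/data/flume/order01.csv   # hypothetical test data
ls /root/data/flume                        # once consumed, the file is renamed to order01.csv.bak (fileSuffix above)
hdfs dfs -ls -R /flume/events/fakeorder    # uploaded log_* files appear under a %y-%m-%d/%H subdirectory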

5. After a successful run, log in to the HDFS web UI at http://192.168.75.245:9870 and browse to the target directory /flume/events/fakeorder to check whether the data was written.

6. Possible error: after starting, the agent runs for a while and the files do appear, but then it fails with an error. Locate the JAVA_OPTS setting in flume190/bin/flume-ng and change it to JAVA_OPTS="-Xmx512m".
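
The relevant line in /opt/software/flume190/bin/flume-ng looks roughly like this (the shipped default is a small heap, typically -Xmx20m; it may vary by version):

# before (shipped default)
JAVA_OPTS="-Xmx20m"
# after
JAVA_OPTS="-Xmx512m"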

7. Possible error: after a run finishes, the agent cannot be started again. Remove Flume's leftover state directory under the home directory:

cd ~
ls -la
rm -rf .flume
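
If the agent uses the file channel from the second example, stale state can also sit in the configured checkpoint and data directories; clearing them (which discards any unflushed events) is a similar last-resort option:

rm -rf /opt/software/flume190/mydata/checkpoint/* /opt/software/flume190/mydata/data/*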