上传put C:\Users\19668\Desktop\2\apache-flume-1.9.0-bin.tar.gz
解压tar -zxvf apache-flume-1.9.0-bin.tar.gz -C app/
cd /home/hadoop/app/apache-flume-1.9.0-bin/conf
新建一个配置文件 vi my.conf
# Flume agent "a1": tail a local log file and print every event to the console.
a1.sources = r1
a1.sinks = k1
a1.channels = c1
# Source: run `tail -F` so each new line appended to 1.log becomes a Flume event.
a1.sources.r1.type = exec
a1.sources.r1.command = tail -F /home/hadoop/flumedata/1.log
# Sink: log events at INFO level (demo/debugging sink, pairs with -Dflume.root.logger=INFO,console).
a1.sinks.k1.type = logger
# Channel: in-memory buffer between source and sink.
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100
# Wire the source and the sink to the channel.
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1
打开另一个zjgm01
新建一个flumedata
[hadoop@zjgm01 ~]$ mkdir flumedata
[hadoop@zjgm01 ~]$ cd flumedata/
[hadoop@zjgm01 flumedata]$ ll
total 0
[hadoop@zjgm01 flumedata]$ pwd
/home/hadoop/flumedata
[hadoop@zjgm01 flumedata]$ echo 1 > 1.log 替换 echo 1 >> 1.log 追加
[hadoop@zjgm01 flumedata]$ ll
启动flume
cd /home/hadoop/app/apache-flume-1.9.0-bin
[hadoop@zjgm01 apache-flume-1.9.0-bin]$ bin/flume-ng agent --conf conf --conf-file conf/my.conf --name a1 -Dflume.root.logger=INFO,console
在另一个zjgm01的 1.log 追加数据
echo wms yrz >> 1.log
显示1.log的所有数据 日志采集
cd /home/hadoop/app/apache-flume-1.9.0-bin/conf
再打开zjgm01 创建一个 ks.conf
# Flume agent "a1": tail a local log file and forward every event to Kafka topic "dsj".
a1.sources = r1
a1.sinks = k1
a1.channels = c1
# Source: run `tail -F` so each new line appended to 1.log becomes a Flume event.
a1.sources.r1.type = exec
a1.sources.r1.command = tail -F /home/hadoop/flumedata/1.log
# Sink: Kafka sink. Since Flume 1.7 the documented property names are
# kafka.bootstrap.servers / kafka.topic; the old brokerList / topic names
# are deprecated (Flume 1.9 only translates them with a warning).
a1.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
a1.sinks.k1.kafka.bootstrap.servers = zjgm01:9092,zjgm02:9092,zjgm03:9092
a1.sinks.k1.kafka.topic = dsj
# Channel: in-memory buffer between source and sink.
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100
# Wire the source and the sink to the channel.
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1
cd /home/hadoop/app/zookeeper-3.4.5/bin
分别启动三台zookeeper
./zkServer.sh start
cd /home/hadoop/app/kafka_2.11-0.11.0.2 进入该路径后,分别启动三台kafka
bin/kafka-server-start.sh config/server.properties
cd /home/hadoop/app/apache-storm-0.9.2-incubating/bin 启动三台storm
zjgm01
./storm nimbus
zjgm02 zjgm03
./storm supervisor
启动flume
cd /home/hadoop/app/apache-flume-1.9.0-bin
bin/flume-ng agent --conf conf --conf-file conf/ks.conf --name a1 -Dflume.root.logger=INFO,console
运行stormkafka 打开c盘 storm
cd flumedata
echo xybaixh >> 1.log
在storm里面追加
配置启动路径
zjgm01 zjgm02 zjgm03
[hadoop@zjgm01 app]$ pwd
/home/hadoop/app
sudo vi /etc/profile
export JAVA_HOME=/home/hadoop/app/jdk
export HADOOP_HOME=/home/hadoop/app/hadoop-2.4.1
export HIVE_HOME=/home/hadoop/app/hive-0.12.0
export ZK_HOME=/home/hadoop/app/zookeeper-3.4.5
export KAFKA_HOME=/home/hadoop/app/kafka_2.11-0.11.0.2
export STORM_HOME=/home/hadoop/app/apache-storm-0.9.2-incubating
export FLUME_HOME=/home/hadoop/app/apache-flume-1.9.0-bin
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HIVE_HOME/bin:$ZK_HOME/bin:$KAFKA_HOME/bin:$STORM_HOME/bin:$FLUME_HOME/bin
source /etc/profile