》brew install flume
1. 配置spool方式的监测(即监测指定的文件目录)
》vi flume.conf
# Flume agent "a1": spooling-directory source -> file channel -> HDFS sink
a1.sources = r1
a1.channels = c1
a1.sinks = k1

# Source r1: watch a local directory for completed files (spooldir)
a1.sources.r1.type = spooldir
a1.sources.r1.spoolDir = /Users/jimmy/Downloads/download/pv_UV
# Skip temp files that are still being written (*.tmp)
a1.sources.r1.ignorePattern = ^(.)*\\.tmp$
# Raise the max event line length (default 2048) so long log lines are not truncated
a1.sources.r1.deserializer.maxLineLength = 20480
a1.sources.r1.channels = c1

# Channel c1: durable file channel used as the temporary buffer
a1.channels.c1.type = file
a1.channels.c1.checkpointDir = /Users/jimmy/flume/checkpoint
a1.channels.c1.dataDirs = /Users/jimmy/flume/data

# Sink k1: write events to HDFS, bucketed by date and hour
a1.sinks.k1.type = hdfs
a1.sinks.k1.channel = c1
a1.sinks.k1.hdfs.path = /user/jimmy/flume/%Y-%m-%d/%H
a1.sinks.k1.hdfs.filePrefix = flume
a1.sinks.k1.hdfs.rollSize = 102400
a1.sinks.k1.hdfs.rollCount = 1000
# Required because hdfs.path uses time escapes (%Y-%m-%d/%H)
a1.sinks.k1.hdfs.useLocalTimeStamp = true
# Write plain text instead of the default SequenceFile so the HDFS
# output matches the original text logs and is directly readable by Hive
a1.sinks.k1.hdfs.writeFormat = Text
a1.sinks.k1.hdfs.fileType = DataStream
2.运行
/usr/local/Cellar/flume/1.6.0/libexec/conf》../bin/flume-ng agent --conf conf --conf-file flume.conf --name a1 -Dflume.root.logger=INFO,console
3.把hive表与flume的导出hdfs数据相结合
A)创建hive表:
-- Hive table over the Flume-exported HDFS text data.
-- Fields are tab-separated, matching the original log files the
-- spooldir source ingests; partitioned by day (ds) and hour so each
-- partition maps directly onto the /user/jimmy/flume/%Y-%m-%d/%H
-- directory layout produced by the HDFS sink.
-- IF NOT EXISTS makes the script safe to re-run.
CREATE TABLE IF NOT EXISTS track_log_flume (
    id string,
    url string,
    referer string,
    keyword string,
    type string,
    guid string,
    pageId string,
    moduleId string,
    linkId string,
    attachedInfo string,
    sessionId string,
    trackerU string,
    trackerType string,
    ip string,
    trackerSrc string,
    cookie string,
    orderCode string,
    trackTime string,
    endUserId string,
    firstLink string,
    sessionViewNo string,
    productId string,
    curMerchantId string,
    provinceId string,
    cityId string
)
PARTITIONED BY (ds string, hour string)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
STORED AS TEXTFILE;
B)修改conf(重要:末尾新增的writeFormat/fileType两行,解决导出数据格式与原文本文件/Users/jimmy/Downloads/download/pv_UV/2015082818格式不一致的问题)
# Sink k1 (revised): HDFS output written as plain text rather than
# the default SequenceFile
a1.sinks.k1.type = hdfs
a1.sinks.k1.channel = c1
a1.sinks.k1.hdfs.path = /user/jimmy/flume/%Y-%m-%d/%H
a1.sinks.k1.hdfs.filePrefix = flume
a1.sinks.k1.hdfs.rollSize = 102400
a1.sinks.k1.hdfs.rollCount = 1000
# These two keys keep the HDFS files in the same text format as the
# source logs, so the Hive table can read them directly
a1.sinks.k1.hdfs.writeFormat = Text
a1.sinks.k1.hdfs.fileType = DataStream
C)
hive> alter table track_log_flume add if not exists partition(ds='2016-12-25',hour='16') location '/user/jimmy/flume/2016-12-25/16';