1、在cdh启动了flume的情况下,在linux运行了命令:
flume-ng agent --conf conf --conf-file conf/flume-conf.properties --name a1 -Dflume.root.logger=INFO,console
配置文件1:flume-conf.properties
a1.sources = r1
a1.sinks = s1
a1.channels = c1
#sources 消息生产
a1.sources.r1.type = spooldir
a1.sources.r1.channels = c1
# 用于存放收集的日志(注意:properties 文件不支持行内注释,注释必须单独成行,否则会被当作值的一部分)
a1.sources.r1.spoolDir = /data/flume/flume_dir
a1.sources.r1.fileHeader = false
a1.sources.r1.interceptors = i1
a1.sources.r1.interceptors.i1.type = timestamp
#channels 消息传递
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100
#sinks 消息消费
a1.sinks.s1.type = org.apache.flume.sink.kafka.KafkaSink
# 连接kafka(Flume 1.7+ 建议改用 kafka.bootstrap.servers,brokerList 为旧属性名)
a1.sinks.s1.brokerList = node01:9092
# flume收集的日志分发给kafka的对应主题名称(属性名是 topic 而非 topics;Flume 1.7+ 建议用 kafka.topic)
a1.sinks.s1.topic = from_flume
a1.sinks.s1.requiredAcks = 1
a1.sinks.s1.batchSize = 20
# 注意这里是 channel 不是 channels
a1.sinks.s1.channel = c1
报错:
[root@node01 flume-ng]# flume-ng agent --conf conf --conf-file conf/flume-conf.properties --name a1 -Dflume.root.logger=INFO,console
Info: Including Hadoop libraries found via (/bin/hadoop) for HDFS access
Info: Including HBASE libraries found via (/opt/cloudera/parcels/CDH-6.0.1-1.cdh6.0.1.p0.590678/lib/hbase/bin/hbase) for HBASE access
Java HotSpot(TM) 64-Bit Server VM warning: Using incremental CMS is deprecated and will likely be removed in a future release
(上面两行 Info/警告每次启动都有报,但是不影响运行)
Error: Could not find or load main class org.apache.flume.tools.GetJavaProperty