Flume: collecting Nginx logs into Kafka
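This walkthrough sets up Apache Flume 1.9.0 as a log shipper: an exec source tails the OpenResty Nginx access log, a memory channel buffers the events, and a Kafka sink publishes them to the topic mytopic on the broker at 101.132.164.122:9092. Start by preparing the configuration files under conf/: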

[root@localhost apache-flume-1.9.0-bin]# cd conf

[root@localhost conf]# ls

      flume-conf.properties.template flume-env.ps1.template flume-env.sh.template log4j.properties

[root@localhost conf]# cp flume-env.sh.template flume-env.sh

[root@localhost conf]# vim flume-env.sh
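flume-env.sh is typically edited to point Flume at the JDK and, optionally, to size the agent's heap; the values below are illustrative assumptions, not taken from the original session:

    # assumption: adjust JAVA_HOME to the JDK actually installed on this host
    export JAVA_HOME=/usr/local/jdk1.8.0_251
    # optional: heap sizing for the agent JVM
    export JAVA_OPTS="-Xms512m -Xmx1024m"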

[root@localhost conf]# cp flume-conf.properties.template flume-conf.properties

[root@localhost conf]# ls

        flume-conf.properties

        flume-conf.properties.template

        flume-env.ps1.template

        flume-env.sh

        flume-env.sh.template

        log4j.properties

[root@localhost conf]# vim exec.conf

# Name the components of agent a1
a1.sources = r1
a1.channels = c1
a1.sinks = k1

# Exec source: tail -F follows the file by name, so it keeps
# working after Nginx rotates access.log
a1.sources.r1.type = exec
a1.sources.r1.command = tail -F /usr/local/openresty/nginx/logs/access.log
a1.sources.r1.channels = c1

# Memory channel: fast but not durable; buffered events are lost if
# the agent dies. transactionCapacity must be >= the sink's batchSize.
a1.channels.c1.type = memory
a1.channels.c1.capacity = 10000
a1.channels.c1.transactionCapacity = 100

# Kafka sink. topic/brokerList/requiredAcks/batchSize are the legacy
# property names; Flume 1.9 still accepts them and maps them onto
# kafka.topic, kafka.bootstrap.servers, kafka.producer.acks, etc.
a1.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
a1.sinks.k1.topic = mytopic
a1.sinks.k1.brokerList = 101.132.164.122:9092
a1.sinks.k1.requiredAcks = 1
a1.sinks.k1.batchSize = 20
a1.sinks.k1.channel = c1
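Before starting the agent, make sure the topic exists on the broker (depending on broker settings, Kafka may also auto-create it on first write). A minimal sketch, assuming Kafka is installed under /usr/local/kafka and ZooKeeper runs on the broker host (both paths are assumptions):

    # assumption: Kafka install path and ZooKeeper address are illustrative
    /usr/local/kafka/bin/kafka-topics.sh --create \
        --zookeeper 101.132.164.122:2181 \
        --replication-factor 1 --partitions 1 \
        --topic mytopic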

[root@izuf6i88rqjzw5fpredfntz apache-flume-1.9.0-bin]# ls

bin CHANGELOG conf DEVNOTES doap_Flume.rdf docs lib LICENSE NOTICE README.md RELEASE-NOTES tools

[root@izuf6i88rqjzw5fpredfntz apache-flume-1.9.0-bin]# bin/flume-ng agent -c conf -f conf/exec.conf -n a1 -Dflume.root.logger=INFO,console
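Here -c names the configuration directory, -f selects the agent definition written above, -n picks the agent name (it must match the a1 prefix in exec.conf), and -Dflume.root.logger=INFO,console routes the agent's log to the terminal, producing the startup output below.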

Info: Sourcing environment configuration script /usr/local/apache-flume-1.9.0-bin/conf/flume-env.sh

Info: Including Hadoop libraries found via (/usr/local/hadoop-2.10.0/bin/hadoop) for HDFS access

Info: Including HBASE libraries found via (/usr/local/hbase-2.2.5/bin/hbase) for HBASE access

Info: Including Hive libraries found via (/usr/local/hive-2.3.7) for Hive access

org.apache.kafka.common.config.AbstractConfig.logAll(AbstractConfig.java:279)] ProducerConfig values:

acks = 1

batch.size = 16384

bootstrap.servers = [101.132.164.122:9092]

buffer.memory = 33554432

client.id =

compression.type = none

connections.max.idle.ms = 540000

enable.idempotence = false

interceptor.classes = []

key.serializer = class org.apache.kafka.common.serialization.StringSerializer

linger.ms = 0

max.block.ms = 60000

max.in.flight.requests.per.connection = 5

max.request.size = 1048576

metadata.max.age.ms = 300000

metric.reporters = []

metrics.num.samples = 2

metrics.recording.level = INFO

metrics.sample.window.ms = 30000

partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner

receive.buffer.bytes = 32768

reconnect.backoff.max.ms = 1000

reconnect.backoff.ms = 50

request.timeout.ms = 30000

retries = 0

retry.backoff.ms = 100

sasl.client.callback.handler.class = null

sasl.jaas.config = null

sasl.kerberos.kinit.cmd = /usr/bin/kinit

sasl.kerberos.min.time.before.relogin = 60000

sasl.kerberos.service.name = null

sasl.kerberos.ticket.renew.jitter = 0.05

sasl.kerberos.ticket.renew.window.factor = 0.8

sasl.login.callback.handler.class = null

sasl.login.class = null

sasl.login.refresh.buffer.seconds = 300

sasl.login.refresh.min.period.seconds = 60

sasl.login.refresh.window.factor = 0.8

sasl.login.refresh.window.jitter = 0.05

sasl.mechanism = GSSAPI

security.protocol = PLAINTEXT

send.buffer.bytes = 131072

ssl.cipher.suites = null

ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]

ssl.endpoint.identification.algorithm = https

ssl.key.password = null

ssl.keymanager.algorithm = SunX509

ssl.keystore.location = null

ssl.keystore.password = null

ssl.keystore.type = JKS

ssl.protocol = TLS

ssl.provider = null

ssl.secure.random.implementation = null

ssl.trustmanager.algorithm = PKIX

ssl.truststore.location = null

ssl.truststore.password = null

ssl.truststore.type = JKS

transaction.timeout.ms = 60000

transactional.id = null

value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer
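The dump confirms that the legacy sink properties were translated: requiredAcks=1 surfaced as acks = 1 and brokerList as bootstrap.servers = [101.132.164.122:9092]; keys are serialized as strings and event bodies as raw bytes.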

2020-09-28 17:00:06,557 (lifecycleSupervisor-1-1) [INFO - org.apache.kafka.common.utils.AppInfoParser$AppInfo.<init>(AppInfoParser.java:109)] Kafka version : 2.0.1

2020-09-28 17:00:06,557 (lifecycleSupervisor-1-1) [INFO - org.apache.kafka.common.utils.AppInfoParser$AppInfo.<init>(AppInfoParser.java:110)] Kafka commitId : fa14705e51bd2ce5

2020-09-28 17:00:06,559 (lifecycleSupervisor-1-1) [INFO - org.apache.flume.instrumentation.MonitoredCounterGroup.register(MonitoredCounterGroup.java:119)] Monitored counter group for type: SINK, name: k1: Successfully registered new MBean.

2020-09-28 17:00:06,559 (lifecycleSupervisor-1-1) [INFO - org.apache.flume.instrumentation.MonitoredCounterGroup.start(MonitoredCounterGroup.java:95)] Component type: SINK, name: k1 started

2020-09-28 17:00:09,713 (kafka-producer-network-thread | producer-1) [INFO - org.apache.kafka.clients.Metadata.update(Metadata.java:285)] Cluster ID: 62XCS_6jSomMnIAVezwsbw
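With the agent connected (Kafka 2.0.1, cluster ID logged above), the pipeline can be checked end to end: hit Nginx so a line is appended to access.log, then read it back from the topic. A sketch, assuming Kafka is installed under /usr/local/kafka and Nginx listens on its default port:

    # generate a request so Nginx appends a line to access.log
    curl http://localhost/

    # read back everything Flume has shipped into the topic
    /usr/local/kafka/bin/kafka-console-consumer.sh \
        --bootstrap-server 101.132.164.122:9092 \
        --topic mytopic --from-beginning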
