Monitoring Kafka Consumers: DingTalk Alerts and Adding Consumers

I. Background

  • Monitor whether a Kafka consumer is falling behind (messages piling up); once the backlog reaches a threshold, send a DingTalk alert.

  • Add consumers as needed so consumption can catch up.

II. DingTalk Alerts

import json

import requests
from pykafka import KafkaClient

servers = ['172.18.10.48:9092']
topic_name = 'student_name_logs'



class DingMessageManage(object):
    """DingTalk alert helper"""

    @classmethod
    def send_ding_message(cls, web_hook, ding_user, send_msg):
        """
        :param web_hook: DingTalk robot webhook URL
        :param ding_user: mobile numbers of the DingTalk users to @
        :param send_msg: message to send
        :return: no return value
        """

        # Request URL: the webhook address
        # Build request headers
        header = {
            "Content-Type": "application/json",
            "Charset": "UTF-8"
        }
        # Build request payload
        message = {
            "msgtype": "text",
            "text": {
                "content": str(send_msg)
            },
            "at": {
                # To @ someone, list their DingTalk mobile numbers here
                "atMobiles": ding_user,
                # @ everyone
                "isAtAll": False
            }
        }
        # Serialize the payload as JSON
        message_json = json.dumps(message)
        # Send the request
        info = requests.post(url=web_hook, data=message_json, headers=header)
        # Print the response
        print("ding message return info:{}".format(info.text))



def kafka_customer_handle(servers, topic_name, partition, consumer_group):
    # Check consumer lag on the topic
    servers = ','.join(servers)  # '127.0.0.1:9092,127.0.0.2:9092,127.0.0.3:9092'
    client = KafkaClient(hosts=servers)
    topic = client.topics[topic_name]
    partitions = topic.partitions
    consumer = topic.get_simple_consumer(consumer_group=consumer_group, auto_commit_interval_ms=1,
                                         auto_commit_enable=True, consumer_id=b'rpo')
    print(consumer.held_offsets)  # currently held (committed) offsets
    print("Partitions {}".format(partitions))
    earliest_offsets = topic.earliest_available_offsets()
    print("Earliest available offsets {}".format(earliest_offsets))
    last_offsets = topic.latest_available_offsets()
    print("Latest available offsets {}".format(last_offsets))
    offset = consumer.held_offsets
    rest = 0
    print("Current consumer offsets per partition: {}".format(offset))
    for o in offset:
        rest += last_offsets[o].offset[0] - offset[o] - 1
        print('Partition %s, unconsumed messages: %s' % (o, last_offsets[o].offset[0] - offset[o] - 1))
    print('Total unconsumed messages: %s' % rest)


    if rest > 100:
        error_msg = "Business alert: unconsumed Kafka messages: {}".format(str(rest))
        web_hook = 'https://oapi.dingtalk.com/robot/send?access_token=a4d587080949985a81d771a1e216b4403b6f11111a5dab0e3150782f9a9a70cc72007'
        ding_user = ['18000000']  # mobile numbers in atMobiles must be strings
        DingMessageManage.send_ding_message(web_hook=web_hook, ding_user=ding_user, send_msg=error_msg)

partition = 0  # partition
consumer_group = 'consumer_group_test'
kafka_customer_handle(servers, topic_name, partition, consumer_group)
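
The script above checks the lag once and exits. Below is a minimal sketch of wrapping it in a polling loop so the alert can fire unattended; the 60-second interval is an assumption, not part of the original script.

import time

CHECK_INTERVAL_SECONDS = 60  # assumed polling interval

while True:
    # reuse the monitoring function defined above
    kafka_customer_handle(servers, topic_name, partition, consumer_group)
    time.sleep(CHECK_INTERVAL_SECONDS)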

III. Adding Consumers

1. First, increase the number of partitions

./bin/kafka-topics.sh --alter --zookeeper localhost:2181 --partitions 2 --topic student_name_logs
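
Note: the --zookeeper flag applies to older Kafka releases; on Kafka 2.2 and later, kafka-topics.sh takes --bootstrap-server <broker>:9092 instead, and --zookeeper was removed entirely in Kafka 3.0.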

2. Command to view topic details

./bin/kafka-topics.sh --zookeeper localhost:2181 --describe --topic <topic-name>

3. Consuming

import json
import threading
import time
from pykafka import KafkaClient


class Kafka_customer(object):
    def __init__(self, servers=None, topic_name=None):
        self.servers = ['172.18.10.48:9092'] if not servers else servers
        self.topic_name = 'student_name_logs' if not topic_name else topic_name


    def kafka_customer_handle(self, partition, consumer_group):
        # Consume data from Kafka, pinned to a single partition
        servers = ','.join(self.servers)  # '127.0.0.1:9092,127.0.0.2:9092,127.0.0.3:9092'
        client = KafkaClient(hosts=servers)
        topic = client.topics[self.topic_name]
        partitions = topic.partitions
        consumer = topic.get_simple_consumer(consumer_group=consumer_group, auto_commit_interval_ms=1,
                                             auto_commit_enable=True,
                                             consumer_id=b'rpo', partitions=[partitions[partition]])
        print(consumer.held_offsets)  # currently held (committed) offsets
        print("Partitions {}".format(partitions))
        earliest_offsets = topic.earliest_available_offsets()
        print("Earliest available offsets {}".format(earliest_offsets))
        last_offsets = topic.latest_available_offsets()
        print("Latest available offsets {}".format(last_offsets))
        offset = consumer.held_offsets
        print("Current consumer offsets per partition: {}".format(offset))
        print('Unconsumed messages: %s' % (last_offsets[partition].offset[0] - offset[partition] - 1))
        for message in consumer:
            value = json.loads(message.value.decode(encoding='utf-8'))
            if partition == 0:
                print(value, '-----0---')
            else:
                print(value, '-----1---')
            time.sleep(0.5)



if __name__ == '__main__':
    threadpool = []
    kafka_customer_handle = Kafka_customer().kafka_customer_handle
    for i in range(2):    # 2 = number of partitions
        th = threading.Thread(target=kafka_customer_handle, args=(i, 'consumer_group_test'))
        threadpool.append(th)

    for th in threadpool:
        th.start()

    for th in threadpool:
        th.join()
    print('end')
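
The threaded example pins each consumer to a fixed partition by hand. As an alternative sketch (an assumption on my part, not part of the original post), pykafka's balanced consumer lets the library distribute partitions across all consumers in the same group, so adding a consumer is just starting another copy of the process; the ZooKeeper address below is a placeholder.

from pykafka import KafkaClient

client = KafkaClient(hosts='172.18.10.48:9092')
topic = client.topics[b'student_name_logs']
# get_balanced_consumer coordinates partition assignment via ZooKeeper, so starting
# another process with the same consumer_group triggers an automatic rebalance
consumer = topic.get_balanced_consumer(
    consumer_group=b'consumer_group_test',
    auto_commit_enable=True,
    zookeeper_connect='localhost:2181'  # assumption: local ZooKeeper
)
for message in consumer:
    if message is not None:
        print(message.offset, message.value)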

Tip: with RabbitMQ, adding a consumer is as simple as starting another consumer process, as sketched below.
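
A minimal sketch of that idea, assuming the pika client and a queue named student_name_logs (both assumptions, not from the original post): starting a second copy of this process makes RabbitMQ round-robin deliveries between the two consumers.

import pika

# connect to a local broker (assumption) and declare the queue
connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = connection.channel()
channel.queue_declare(queue='student_name_logs')

def handle(ch, method, properties, body):
    # process the message, then acknowledge it
    print('consumed:', body)
    ch.basic_ack(delivery_tag=method.delivery_tag)

channel.basic_consume(queue='student_name_logs', on_message_callback=handle)
channel.start_consuming()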
