连接Kafka

from kafka import KafkaProducer, KafkaConsumer
import threading
import yaml
import function.jyupay_hb_5 as upay_hb_5_kafka
import function.upay_log as upay_log_kafka
import function.jyupay_hb_info as jyupay_hb_info_kafka
import function.upay_metric as upay_metric_kafka


class KafkaConnect(threading.Thread):
    """Consumer thread bound to a single Kafka topic.

    Each instance opens its own KafkaConsumer and, once started, loops
    forever dispatching every message payload to the handler module that
    matches the topic name.
    """

    def __init__(self, kafkahost, kafkaport, kafkatopic, kafkakerberos):
        """
        :param kafkahost: broker host name or IP
        :param kafkaport: broker port
        :param kafkatopic: topic this thread consumes
        :param kafkakerberos: dict with key 'is_used'
                              (1 = SASL/GSSAPI Kerberos, anything else = plaintext)
        """
        threading.Thread.__init__(self)
        self.kafkaHost = kafkahost
        self.kafkaPort = kafkaport
        self.kafkatopic = kafkatopic
        bootstrap = '{kafka_host}:{kafka_port}'.format(
            kafka_host=self.kafkaHost, kafka_port=self.kafkaPort)
        if kafkakerberos['is_used'] == 1:
            # Kerberos-secured cluster: authenticate via SASL/GSSAPI.
            self.consumer = KafkaConsumer(
                self.kafkatopic,
                bootstrap_servers=bootstrap,
                security_protocol="SASL_PLAINTEXT",
                sasl_mechanism="GSSAPI",
                sasl_kerberos_service_name="kafka",
                compression_type='gzip')
        else:
            # Plaintext connection. The original only handled is_used == 0
            # and left self.consumer unset for any other value, which made
            # run() crash with AttributeError.
            self.consumer = KafkaConsumer(self.kafkatopic,
                                          bootstrap_servers=bootstrap)

    def run(self):
        # Blocks forever; routes each record's payload to the topic handler.
        for msg in self.consumer:
            if self.kafkatopic == 'UPAY-LOG':
                upay_log_kafka.kafka_data_handle(msg.value)
            elif self.kafkatopic == 'jyupay_hb_5':
                upay_hb_5_kafka.kafka_data_handle(msg.value)
            elif self.kafkatopic == 'jyupay_hb_info':
                jyupay_hb_info_kafka.kafka_data_handle(msg.value)
            elif self.kafkatopic == 'UPAY-METRIC':
                upay_metric_kafka.kafka_data_handle(msg.value)


def main():
    """Read kafka_config.yaml and start one consumer thread per configured
    topic, then wait for all of them (they normally run forever).

    Expected YAML shape: a top-level ``kafka_config`` mapping whose values
    each carry ``kafkaHost``, ``kafkaPort``, ``topic`` and ``kerberos`` keys.
    """
    with open("kafka_config.yaml", 'r', encoding='utf-8') as yml_file:
        # SafeLoader: config files must never trigger arbitrary object construction.
        cfg = list(yaml.load_all(yml_file, Loader=yaml.SafeLoader))
        kafka_info = cfg[0]

    thread_dict = {}
    for name, conf in kafka_info['kafka_config'].items():
        print(name)
        thread = KafkaConnect(conf['kafkaHost'], conf['kafkaPort'],
                              conf['topic'], conf['kerberos'])
        thread_dict[name] = thread
        thread.start()

    # Consumer threads loop forever, so this normally blocks indefinitely.
    for thread in thread_dict.values():
        thread.join()


# Script entry point: start the configured Kafka consumer threads.
if __name__ == '__main__':
    main()

import re
import json
import traceback
import uuid

from cassandra.auth import PlainTextAuthProvider
from cassandra.cluster import Cluster

from function import cassandra_ip, cassandra_port, cassandra_username, cassandra_password, cassandra_keyspace
from function.utils import datetime2timestamp


class Cassandra:
    """Singleton wrapper around a Cassandra cluster connection.

    Every ``Cassandra()`` call returns the same instance. The cluster
    connection is opened exactly once: the original version re-ran
    ``__init__`` on every call, creating a fresh ``Cluster`` each time and
    leaking the previous connection.
    """

    __exist_instance = None

    def __new__(cls, *args, **kwargs):
        # Classic singleton: create the instance once, then always reuse it.
        if not cls.__exist_instance:
            cls.__exist_instance = super().__new__(cls)
        return cls.__exist_instance

    def __init__(self):
        # __init__ runs on every Cassandra() call even though __new__
        # returned the shared instance; only connect the first time.
        if getattr(self, '_db', None) is not None:
            return
        self._db = Cluster([cassandra_ip], port=cassandra_port,
                           auth_provider=PlainTextAuthProvider(username=cassandra_username,
                                                               password=cassandra_password))
        self._key_space = cassandra_keyspace
        self.session = self._db.connect(self._key_space)

    def __enter__(self):
        # Supports ``with Cassandra() as session``; yields the shared session.
        return self.session

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Deliberately keep the shared connection open for later callers.
        pass

    def __del__(self):
        # Shut the cluster down when the singleton is garbage-collected.
        if self._db:
            self._db.shutdown()


def kafka_data_handle(msg):
    """Parse one raw UPAY log message and insert it into ``message_test``.

    :param msg: message payload as bytes; '#'-separated fields:
                timestamp, "<host>_<instance>", serial field, XML body.
    :return: 1 (always; insert failures are printed, not raised)
    """
    # Original used encoding='utf8 ' (trailing space), an invalid codec name.
    msg_logs = str(msg, encoding='utf-8')
    msg_log = msg_logs.split('#')

    # Timestamp: drop the trailing 4 characters — presumably a sub-second
    # suffix; TODO confirm against a real message.
    time_stamp = msg_log[0][0:-4]
    time_stamp = datetime2timestamp(time_stamp, form='%Y-%m-%d %H:%M:%S')

    # Hostname and instance arrive joined as "<host>_<instance>".
    host_name, instance = msg_log[1].split('_', 1)

    # Full XML body and serial number (serial field carries a 7-char prefix).
    ms_log = msg_log[3]
    serial = msg_log[2][7:]

    # Return code from the XML body, if present.
    re_da = re.match(".*<RspCode>(.*?)</RspCode>.*", ms_log)
    return_code = re_da.group(1) if re_da else None

    # Transaction type and derived business type. The original left
    # trancation_type unbound when ActivityCode was missing (NameError at
    # insert time) and then overwrote business_type with '' unconditionally.
    trancation_type = None
    business_type = ''
    ty_match = re.match(".*<ActivityCode>(.*?)</ActivityCode>.*", ms_log)
    if ty_match:
        trancation_type = ty_match.group(1)
        # NOTE(review): the original listed C002_001 and C004_001 twice in
        # their branches — possibly C002_002 / C004_002 were intended; verify.
        if trancation_type in ('C001_001', 'T1000167', 'C001_002'):
            business_type = '充值业务'
        elif trancation_type in ('C002_001', 'T1000160'):
            business_type = '充值结果查询'
        elif trancation_type in ('C004_001', 'T1000151'):
            business_type = '手机号查询业务'

    print(ms_log)

    row_id = uuid.uuid1()
    try:
        with Cassandra() as session:
            # Parameterized statement instead of str.format(): the original
            # interpolated raw log text straight into CQL (injection-prone
            # and broken by any quote in the message body).
            insert_cql = ("INSERT INTO message_test (id, business_type, msg_time, "
                          "hostname, instance, msg_log, return_code, serial, "
                          "trancation_type) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)")
            session.execute(insert_cql,
                            (row_id, business_type, time_stamp, host_name, instance,
                             ms_log, return_code, serial, trancation_type))
    except Exception as e:
        print("handle mess_log data ERROR: ", e)
        print(traceback.format_exc())

    return 1

import json
import time
import pymongo
from kafka import KafkaProducer
from kafka import KafkaConsumer
from kafka.errors import KafkaError

# Kafka broker address and target topic for the producer/consumer helpers below.
KAFKA_HOST = "10.1.62.158"
KAFKA_PORT = 9092
KAFKA_TOPIC = "mergeEvent"

# Module-level MongoDB connection, opened at import time.
# NOTE(review): credentials are hardcoded in the URI — consider moving them
# to configuration or environment variables.
myclient = pymongo.MongoClient(
    "mongodb://developer:123456@10.1.69.4:27017/?authSource=admin&readPreference=primary&appname=MongoDB%20Compass&ssl=false")
mydb = myclient["EB_AIOps"]


def producer(data):
    """Serialize *data* as UTF-8 JSON and publish it to KAFKA_TOPIC.

    :param data: any JSON-serializable object
    """
    try:
        bootstrap_servers = '{kafka_host}:{kafka_port}'.format(
            kafka_host=KAFKA_HOST,
            kafka_port=KAFKA_PORT
        )
        kfk_producer = KafkaProducer(bootstrap_servers=bootstrap_servers)
        try:
            parmas_message = json.dumps(data, ensure_ascii=False).encode('utf-8')
            print(parmas_message)

            kfk_producer.send(KAFKA_TOPIC, parmas_message)
            kfk_producer.flush()
        finally:
            # The original created a new producer per call and never closed
            # it, leaking sockets and buffers on every message.
            kfk_producer.close()
    except KafkaError as e:
        print(e)


def consumer():
    """Consume KAFKA_TOPIC from the earliest offset, printing each
    JSON-decoded message value."""
    # The original passed only the host (no port) as bootstrap_servers and
    # relied on the client's default port; build host:port explicitly, for
    # consistency with producer().
    kfk_consumer = KafkaConsumer(
        KAFKA_TOPIC,
        bootstrap_servers='{0}:{1}'.format(KAFKA_HOST, KAFKA_PORT),
        auto_offset_reset='earliest',
        value_deserializer=json.loads,
    )
    for message in kfk_consumer:
        print(message.value)


def producer_alarm_org_data(num, skip_num=0):
    """Fetch *num* documents from alarm_org_data (skipping *skip_num*) and
    publish each one to Kafka via :func:`producer`.

    :param num: number of documents to fetch
    :param skip_num: number of leading documents to skip
    """
    rows = list(mydb['alarm_org_data'].find().skip(skip_num).limit(num))
    for doc in rows:
        # ObjectId is not JSON-serializable; stringify it before producing.
        # (The original scanned every key of every document just to find "_id".)
        if '_id' in doc:
            doc['_id'] = str(doc['_id'])
        print('producer: ', doc)
        producer(doc)


if __name__ == "__main__":
    num = 1
    skip_num = 3
    while True:
        producer_alarm_org_data(num, skip_num)
        skip_num += num
        time.sleep(5)
    #consumer()
    # row = mydb['log_detail'].find().sort('_id', -1).limit(10)
    # row = list(row)
    # for v in row:
    #     print(v.get('a'))



评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值