kafka-python && pyspark StreamingContext

28 篇文章 0 订阅
7 篇文章 0 订阅
# coding=utf-8
"""Minimal Spark Streaming demo: print every line received on a local socket.

Feed it input from another terminal with:  nc -lk 9999
"""

from pyspark import SparkContext
from pyspark.streaming import StreamingContext


# One-second micro-batches, submitted to a YARN-managed cluster.
spark_ctx = SparkContext("yarn", "stream_test")
stream_ctx = StreamingContext(spark_ctx, 1)

# Each micro-batch of raw text lines is echoed to stdout.
socket_stream = stream_ctx.socketTextStream('localhost', 9999)
socket_stream.pprint()

stream_ctx.start()
# Run for at most 100 seconds rather than blocking forever
# (ssc.awaitTermination() would block until explicitly stopped).
stream_ctx.awaitTerminationOrTimeout(100)


# Classic word-count variant, kept for reference:
# words = socket_stream.flatMap(lambda line: line.split(" "))
# pairs = words.map(lambda word: (word, 1))
# wordCounts = pairs.reduceByKey(lambda x, y: x + y)
# wordCounts.pprint()

# coding=utf-8

from kafka import KafkaConsumer,KafkaProducer
import logging
import json


def kafka_consumer(kafkatopic, groupid, server):
    """Consume records from *kafkatopic* indefinitely, logging and printing each.

    Args:
        kafkatopic: topic name to subscribe to.
        groupid: consumer-group id; offsets are auto-committed every second.
        server: bootstrap broker address(es) for the cluster.
    """
    consumer = KafkaConsumer(
        kafkatopic,
        group_id=groupid,
        bootstrap_servers=server,
        enable_auto_commit=True,
        auto_commit_interval_ms=1000,
        session_timeout_ms=15000,
    )
    # KafkaConsumer is an iterator that blocks waiting for new records,
    # so this loop never terminates on its own.
    for record in consumer:
        logging.info(record)
        print(record)


def kafka_producer(kafkatopic, server):
    """Publish ten JSON-encoded ``{i: i**2}`` messages to *kafkatopic*.

    Args:
        kafkatopic: topic name to publish to.
        server: bootstrap broker address(es) for the cluster.
    """
    producer = KafkaProducer(
        value_serializer=lambda v: json.dumps(v).encode('utf-8'),
        bootstrap_servers=server,
    )
    for i in range(10):
        producer.send(kafkatopic, {i: i ** 2})
        # Dump producer-side metrics after each send for inspection.
        print(producer.metrics())
    # close() flushes any buffered records before shutting down.
    producer.close()


if __name__ == '__main__':
    # Consumer demo (disabled) — subscribes and prints incoming records:
    # kafka_consumer(
    #     kafkatopic = "topic_consumer_xxx",
    #     groupid = "unique_guess",
    #     server = ['10.1.3.xxx:9092','10.1.4.xxx:9092','10.1.5.xxx:9092']
    # )

    # Producer demo: publish ten sample messages to the test topic.
    kafka_producer(
        kafkatopic='topic_producer_xxx',
        server=['10.1.3.xxx:9092','10.1.4.xxx:9092','10.1.5.xxx:9092'],
    )
  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值