Simple Kafka usage with Python

from pykafka import KafkaClient  # pykafka client; broker address assumed
producer = KafkaClient(hosts='127.0.0.1:9092').topics[b'test'].get_producer()
for i in range(4):
    producer.produce(('test message ' + str(i ** 2)).encode())  # pykafka >= 2.0 takes one bytes message per call
producer.stop()  # flush pending messages before exiting
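
# The same producer with kafka-python (the client used in the consumer example
# further down); a sketch, the broker address is an assumption:
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers='127.0.0.1:9092')
for i in range(4):
    producer.send('test', ('test message ' + str(i ** 2)).encode())  # value must be bytes
producer.flush()  # block until buffered messages are actually delivered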

# Start the Kafka broker in the background, appending output to kafka2.log
nohup bash kafka-server-start.sh ../config/server.properties >>kafka2.log &

# Check which JDK /etc/alternatives/java points to
ls -lrt /etc/alternatives/java

# Environment variables for Kafka Eagle (Kafka monitoring console)
export JAVA_HOME=/data/app/jdk8
export KE_HOME=/data/app/kafka-eagle
export PATH=$PATH:$KE_HOME/bin

# Inspect topics
kafka-topics.sh --zookeeper 127.0.0.1:2181 --list
kafka-topics.sh --zookeeper 127.0.0.1:2181 --topic "ziniao_detail_result" --describe
# Inspect consumer groups (ZooKeeper-registered consumers)
kafka-consumer-groups.sh --zookeeper 127.0.0.1:2181 --list
kafka-consumer-groups.sh --zookeeper 127.0.0.1:2181 --group hello_kafka --describe
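
# The same inspection from Python, as a sketch with kafka-python (broker address assumed):
from kafka import KafkaConsumer
from kafka.admin import KafkaAdminClient

c = KafkaConsumer(bootstrap_servers='127.0.0.1:9092')
print(c.topics())                                      # set of all topic names
print(c.partitions_for_topic('ziniao_detail_result'))  # set of partition ids

admin = KafkaAdminClient(bootstrap_servers='127.0.0.1:9092')
print(admin.list_consumer_groups())                    # [(group_id, protocol_type), ...]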

# Increase a topic's partition count (partitions can only be increased, never decreased)
./kafka-topics.sh --zookeeper localhost:2181 --alter --partitions 4 --topic ziniao_detail_result
./kafka-topics.sh --zookeeper localhost:2181 --alter --partitions 4 --topic list_result
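
# Partition counts can also be raised from Python; a sketch with kafka-python's
# admin client (broker address assumed; the count is the new total, not a delta):
from kafka.admin import KafkaAdminClient, NewPartitions

admin = KafkaAdminClient(bootstrap_servers='127.0.0.1:9092')
admin.create_partitions({'list_result': NewPartitions(total_count=4)})  # grow list_result to 4 partitions
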
# Change a topic's message retention time; the topic-level key is retention.ms (120 hours = 432000000 ms),
# log.retention.hours is a broker-level setting and is rejected here
kafka-configs.sh --zookeeper localhost:2181 --entity-type topics --entity-name test --alter --add-config retention.ms=432000000
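
# The equivalent retention change via kafka-python's admin client, as a sketch
# (broker address assumed):
from kafka.admin import KafkaAdminClient, ConfigResource, ConfigResourceType

admin = KafkaAdminClient(bootstrap_servers='127.0.0.1:9092')
admin.alter_configs([ConfigResource(ConfigResourceType.TOPIC, 'test', configs={'retention.ms': '432000000'})])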

# Benchmark producer performance (100 records, 1 byte each, throttled to 100 records/sec)
./kafka-producer-perf-test.sh --topic test --num-records 100 --record-size 1 --throughput 100  --producer-props bootstrap.servers=*.*.*.*:9092
# Delete a topic (the broker needs delete.topic.enable=true, otherwise the topic is only marked for deletion)
./kafka-topics.sh --zookeeper 127.0.0.1:2181 --delete --topic ziniao_detail_result
./kafka-topics.sh --delete --zookeeper 127.0.0.1:2181 --topic asin_cost_calculator
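
# Topic deletion from Python, a sketch with kafka-python (broker address assumed):
from kafka.admin import KafkaAdminClient

admin = KafkaAdminClient(bootstrap_servers='127.0.0.1:9092')
admin.delete_topics(['ziniao_detail_result'])  # fails unless delete.topic.enable=true on the broker
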
# Enable JMX monitoring: edit bin/kafka-run-class.sh and set JMX_PORT (or export it before starting the broker)
vi bin/kafka-run-class.sh  >>> JMX_PORT=8060



./kafka-console-consumer.sh --bootstrap-server *.*.*.*:9092 --topic test --from-beginning --new-consumer # consume from the beginning; --new-consumer selects the broker-based consumer (flag removed in later Kafka releases) and creates a new consumer group
./kafka-consumer-groups.sh --bootstrap-server *.*.*.*:9092  --list --new-consumer # list broker-based consumer groups


# Read a consumer group's committed offsets from the __consumer_offsets topic.
# A group's commits land in partition (groupId.hashCode & 0x7fffffff) % 50 by default (see the sketch below).
./kafka-simple-consumer-shell.sh --topic __consumer_offsets --partition 1 --broker-list *.*.*.*:9092 --formatter "kafka.coordinator.group.GroupMetadataManager\$OffsetsMessageFormatter"
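
# To find which __consumer_offsets partition holds a given group, Kafka keeps the low
# 31 bits of the Java String hashCode of the group id, modulo the partition count
# (offsets.topic.num.partitions, default 50). A Python sketch of that computation:
def offsets_partition_for(group_id, num_partitions=50):
    h = 0
    for ch in group_id:
        h = (31 * h + ord(ch)) & 0xFFFFFFFF  # Java String.hashCode with 32-bit wraparound
    return (h & 0x7FFFFFFF) % num_partitions  # Kafka's Utils.abs keeps the low 31 bits

print(offsets_partition_for('hello_kafka'))  # the value to pass to --partition above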

# Dump the entire __consumer_offsets topic (consumer.properties must set exclude.internal.topics=false)
./kafka-console-consumer.sh --topic  __consumer_offsets --bootstrap-server *.*.*.*:9092 --formatter "kafka.coordinator.group.GroupMetadataManager\$OffsetsMessageFormatter" --consumer.config ../config/consumer.properties --from-beginning


./kafka-consumer-groups.sh --zookeeper 127.0.0.1:2181 --list # only shows consumers registered in ZooKeeper, not groups using the Java consumer API
./kafka-consumer-groups.sh --new-consumer --bootstrap-server *.*.*.*:9092 --list # list active broker-based consumer groups
./kafka-consumer-groups.sh --new-consumer --bootstrap-server 127.0.0.1:9092 --group s3_uploader --describe # per-partition consumption details for one group
./kafka-run-class.sh kafka.tools.ConsumerOffsetChecker --zookeeper 127.0.0.1:2181 --group s3_uploader # legacy ZooKeeper-based offset checker (deprecated since 0.9)
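
# Committed offsets and lag can also be read programmatically; a sketch with
# kafka-python (broker address and the s3_uploader group from above assumed):
from kafka import KafkaConsumer
from kafka.admin import KafkaAdminClient

admin = KafkaAdminClient(bootstrap_servers='127.0.0.1:9092')
committed = admin.list_consumer_group_offsets('s3_uploader')  # {TopicPartition: OffsetAndMetadata}

consumer = KafkaConsumer(bootstrap_servers='127.0.0.1:9092')
end = consumer.end_offsets(list(committed))  # latest offset per partition

for tp, meta in sorted(committed.items()):
    print(tp.topic, tp.partition, meta.offset, end[tp] - meta.offset)  # committed offset and lag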


# Kafka Eagle KSQL: count the messages in partition 2 of the topic
SELECT count(*) FROM "ziniao_detail_result" WHERE "partition" IN (2)
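
# Roughly the same count without Kafka Eagle: for a non-compacted topic the number
# of messages in a partition is end_offset - beginning_offset (broker address assumed):
from kafka import KafkaConsumer, TopicPartition

tp = TopicPartition('ziniao_detail_result', 2)
c = KafkaConsumer(bootstrap_servers='127.0.0.1:9092')
print(c.end_offsets([tp])[tp] - c.beginning_offsets([tp])[tp])  # message count in partition 2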

# Consume from a specific offset of a specific partition (kafka-python)
from kafka import KafkaConsumer, TopicPartition
from scrapy_crawler.settings import KAFKA_HOST

consumerx = KafkaConsumer('list_result', bootstrap_servers=[KAFKA_HOST, ], auto_offset_reset='earliest',
                          group_id='gen_calc_task_consumer', client_id='test002')
consumerx.unsubscribe()  # drop the topic subscription before assigning partitions manually
consumerx.assign([TopicPartition(topic='datacubes_detail_result', partition=0), ])  # subscribe to one specific partition
print(consumerx.end_offsets([TopicPartition(topic='datacubes_detail_result', partition=0), ]))  # latest produced offset, e.g. {TopicPartition(topic=u'datacubes_detail_result', partition=0): 103005}
print(consumerx.assignment())  # set([TopicPartition(topic='datacubes_detail_result', partition=0)])
print(consumerx.beginning_offsets(consumerx.assignment()))  # earliest offsets still available, e.g. {TopicPartition(topic=u'datacubes_detail_result', partition=0): 0}
consumerx.seek(TopicPartition(topic='datacubes_detail_result', partition=0), 20)  # start consuming from offset 20
consumerx.seek_to_beginning(TopicPartition(topic='datacubes_detail_result', partition=0))  # start from the earliest offset
consumerx.seek_to_end(TopicPartition(topic='datacubes_detail_result', partition=0))  # start from the latest offset
print(consumerx.committed(TopicPartition(topic='datacubes_detail_result', partition=0)))  # the group's last committed offset
for con in consumerx:
    print(con)  # iterate forever, yielding ConsumerRecord objects

 
