# Usage: python3 python_kafka.py <bootstrap_servers> <topic>
#   e.g. python3 python_kafka.py 172.31.0.1:9092 topic
from kafka import KafkaProducer, KafkaConsumer,KafkaAdminClient
from time import sleep
import time
import threading
import sys
from pykafka import KafkaClient
# --- Cluster connection setup ---
# argv[1] = Kafka bootstrap servers ("host:port"), argv[2] = topic name.
admin_client = KafkaAdminClient(bootstrap_servers=sys.argv[1])
# Print broker/controller metadata up front to confirm connectivity.
print(admin_client.describe_cluster() )
bootstrap_servers = sys.argv[1]
topic = sys.argv[2]
def start_producer():
    """Publish 100000 numbered test messages to the module-level ``topic``.

    One message is sent every 3 seconds; payloads look like ``b'msg is 0'``.
    Uses the module-level ``bootstrap_servers``/``topic`` globals (from argv).
    Intended to run in a background thread.
    """
    producer = KafkaProducer(bootstrap_servers=bootstrap_servers)
    try:
        for i in range(100000):
            msg = 'msg is ' + str(i)
            # kafka-python requires bytes payloads.
            producer.send(topic, msg.encode('utf-8'))
            sleep(3)
    finally:
        # send() only buffers records; close() flushes pending messages and
        # releases the network sockets even if the loop is interrupted.
        producer.close()
def start_consumer():
    """Consume messages from the module-level ``topic`` indefinitely.

    Prints each record followed by its timestamp formatted as local time.
    Uses the module-level ``bootstrap_servers``/``topic`` globals (from argv).
    Intended to run in a background thread; the iteration blocks forever.
    """
    consumer = KafkaConsumer(topic, bootstrap_servers=bootstrap_servers)
    # Show which topics exist on the cluster before consuming.
    print( consumer.topics() )
    for msg in consumer:
        print(msg)
        # Kafka record timestamps are epoch milliseconds, hence the /1000.
        print("time = ", time.strftime("%Y-%m-%d %H:%M:%S", time.localtime( msg.timestamp/1000 )) )
# --- Launch producer and consumer concurrently ---
threads = []  # collected so both threads can be started together below
t1 = threading.Thread(target=start_producer)
threads.append(t1)
t2 = threading.Thread(target=start_consumer)
threads.append(t2)

# pykafka client, used here only to dump broker metadata for diagnostics.
client = KafkaClient(hosts=sys.argv[1])
# print( client.topics )   # uncomment to list topics via pykafka
print( client.brokers )

for t in threads:
    t.start()
# --- Elasticsearch snippet (appears to be pasted from a separate script) ---
from elasticsearch import Elasticsearch
# NOTE(review): hard-coded host and an EMPTY password for user 'elastic' —
# almost certainly a placeholder; confirm real credentials before use.
es = Elasticsearch(['http://172.31.0.60:9200'], basic_auth=('elastic', ''), request_timeout=3600)
# Search index 'indxx' with no query body and print the raw response.
print(es.search(index='indxx') )
# --- Standalone admin demo: create a topic, verify, delete it, verify ---
# NOTE(review): this section uses a hard-coded localhost broker and reads the
# TOPIC name from argv[1], whereas the section above treats argv[1] as the
# broker list — the two look like separately pasted scripts; confirm before
# running them as one file.
from kafka import KafkaAdminClient
from kafka.admin import NewTopic
import sys

admin_client = KafkaAdminClient(bootstrap_servers='localhost:9092')

# Desired topic layout: name from the command line, 3 partitions, RF 1.
topic_name = sys.argv[1]
num_partitions = 3
replication_factor = 1
new_topic = NewTopic(name=topic_name, num_partitions=num_partitions, replication_factor=replication_factor)
admin_client.create_topics(new_topics=[new_topic], validate_only=False)

# Verify the create by reading back the broker's topic list.
topics_set = admin_client.list_topics()
if topic_name in topics_set:
    print("成功创建主题:{}".format(topic_name))
else:
    print("无法创建主题")

admin_client.delete_topics(topics=[topic_name])
# BUG FIX: the original re-checked the PRE-delete topic list, so the delete
# verification could never report success. Fetch a fresh list after deleting.
topics_set = admin_client.list_topics()
if topic_name not in topics_set:
    print("成功删除主题:{}".format(topic_name))
else:
    print("无法删除主题")