# coding=utf-8
"""Kafka producer/consumer helpers with file + console logging."""
import time
import logging
import sys
import json

import etc.config as conf

# Bundled kafka-python client (1.3.3) shipped alongside the project.
sys.path.append('***********/kafka-python-1.3.3')
from kafka import KafkaProducer
from kafka import KafkaConsumer
from kafka import TopicPartition
from kafka.errors import KafkaError


def log_name():
    """Return today's log file path: <conf.kafka_logDir><YYYYMMDD>.log."""
    base_name = conf.kafka_logDir
    # Daily rotation by embedding the current date in the file name.
    date = time.strftime('%Y%m%d', time.localtime(time.time())) + '.log'
    return base_name + date
# Root logger: DEBUG and above is appended to the daily log file.
_LOG_FORMAT = '%(asctime)-15s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s'
_DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
logging.basicConfig(
    level=logging.DEBUG,
    format=_LOG_FORMAT,
    datefmt=_DATE_FORMAT,
    filename=log_name(),
    filemode='a',
)
# Mirror INFO-and-above records to the console as well.
console = logging.StreamHandler()
console.setLevel(logging.INFO)
logging.getLogger('').addHandler(console)


class kfkProducer(object):
    """Thin wrapper around kafka-python's KafkaProducer.

    Holds broker host/port (and an optional default topic) and provides
    helpers to create a producer and publish messages.
    """

    def __init__(self, broker, kafkaPort, kafkaTopic=''):
        self._broker = broker
        self._kafkaPort = kafkaPort
        self._kafkaTopic = kafkaTopic

    def __str__(self):
        """Log the connection parameters and return them as a string.

        BUG FIX: the original returned None, which makes str(obj) raise
        TypeError; __str__ must return a str.
        """
        lines = (
            "--------------------------------",
            "kafka-producer params ...",
            "[KAFKA-BROKER]:%s" % self._broker,
            "[KAFKA-PORT]:%s" % self._kafkaPort,
            "[KAFKA-TOPIC]:%s" % self._kafkaTopic,
            "--------------------------------",
        )
        for line in lines:
            logging.info(line)
        return '\n'.join(lines)

    def registerKfkProducer(self):
        """Create and return a KafkaProducer, or None if creation fails.

        BUG FIX: on KafkaError the original fell through to
        `return producer` with `producer` unbound, raising NameError.
        """
        try:
            producer = KafkaProducer(
                bootstrap_servers='{kafka_host}:{kafka_port}'.format(
                    kafka_host=self._broker,
                    kafka_port=self._kafkaPort,
                ))
        except KafkaError as e:
            logging.error(e)
            return None
        return producer

    def produceMsg(self, topic, msg, partition=0):
        """Publish ``msg`` to ``topic``/``partition`` and flush.

        The message value is sent as-is; serialize (e.g. json.dumps)
        before calling if needed.
        """
        if not topic:
            logging.error("topic is None, plz check!")
            return
        try:
            producer = self.registerKfkProducer()
            if producer is None:
                # Producer creation already logged the failure.
                return
            producer.send(topic, value=msg, partition=partition)
            # Flush so the message is actually delivered before returning.
            producer.flush()
        except KafkaError as e:
            logging.error(e)
class kfkConsumer(object):
    """Thin wrapper around kafka-python's KafkaConsumer.

    Holds broker host/port (and an optional default topic) and provides
    helpers to create a consumer and read messages from a partition.
    """

    def __init__(self, broker, kafkaPort, kafkaTopic=''):
        self._broker = broker
        self._kafkaPort = kafkaPort
        self._kafkaTopic = kafkaTopic

    def __str__(self):
        """Log the connection parameters and return them as a string.

        BUG FIX: the original returned None, which makes str(obj) raise
        TypeError; __str__ must return a str.
        """
        lines = (
            "--------------------------------",
            "kafka-consumer params ...",
            "[KAFKA-BROKER]:%s" % self._broker,
            "[KAFKA-PORT]:%s" % self._kafkaPort,
            "[KAFKA-TOPIC]:%s" % self._kafkaTopic,
            "--------------------------------",
        )
        for line in lines:
            logging.info(line)
        return '\n'.join(lines)

    def registerConsumer(self):
        """Create and return a KafkaConsumer, or None if creation fails.

        BUG FIX: on KafkaError the original fell through to
        `return consumer` with `consumer` unbound, raising NameError.
        """
        try:
            consumer = KafkaConsumer(
                bootstrap_servers=[self._broker + ':' + self._kafkaPort],
                # Start from the earliest available offset when no
                # committed offset exists.
                auto_offset_reset='earliest')
        except KafkaError as e:
            logging.error(e)
            return None
        return consumer

    def consumerMsg(self, topic, partition=0):
        """Consume messages from ``topic``/``partition`` and log them.

        Blocks iterating the consumer until it is closed or an error
        occurs.
        """
        if not topic:
            logging.error("topic is None, plz check!")
            return
        try:
            v_consumer = self.registerConsumer()
            if v_consumer is None:
                # Consumer creation already logged the failure.
                return
            v_consumer.assign([TopicPartition(topic, partition)])
            for message in v_consumer:
                # message value and key are raw bytes -- decode if
                # necessary, e.g. message.value.decode('utf-8')
                logging.info("%s:%d:%d: msg=%s" % (
                    message.topic, message.partition,
                    message.offset, message.value.decode('utf-8')))
        except KafkaError as e:
            logging.error(e)