springboot整合kafka
1.导入kafka坐标
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
2.编写配置文件 application.properties
#=================kafka 配置==================================#
# 指定 Kafka broker（bootstrap servers）地址，注意不是 ZooKeeper 地址
#原始数据kafka读取 这里的ip是你自己kafka的ip
spring.kafka.bootstrap-servers=192.168.30.137:9092
# 生产者
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
## 消费者 test-consumer-group是你自己kafka的组
spring.kafka.consumer.group-id=test-consumer-group
spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.auto-commit-interval=1000
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
3.编写生产端
3.1 注入 KafkaTemplate
@Autowired
private KafkaTemplate kafkaTemplate;
3.2 增加方法
// POST /bootProduce — demo endpoint: emits two sample log lines and sends
// three test messages to the "productscanlog" Kafka topic.
@RequestMapping(value = "bootProduce",method = RequestMethod.POST)
public void produceBootMsg(){
// Sample INFO/ERROR log output (useful when a Kafka log appender collects application logs).
logger.info("kafka日志收集的日志。。。。。。。。");
logger.error("kafka日志收集的日志。。。。。。。。error");
// Send to explicit partition 0 with record key "key1".
kafkaTemplate.send("productscanlog",0,"key1","springboot测试消息1");
// No key given: the default partitioner chooses the partition.
kafkaTemplate.send("productscanlog","我的springboot测试消息2");
kafkaTemplate.send("productscanlog","我的springboot测试消息3");
}
4.编写消费端
package com.yyj.linstencer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
/**
 * Kafka consumer component: subscribes to the "productscanlog" topic and
 * writes each received record's topic, offset and value to standard output.
 */
@Component
@EnableKafka
public class ConsumerLinstener {

    /** Invoked by the listener container for every record on "productscanlog". */
    @KafkaListener(topics = "productscanlog")
    public void listen (ConsumerRecord<String, String> record) throws Exception {
        // Build the line first, then print it — identical output to the original printf.
        String line = String.format("topic = %s, offset = %d, value = %s \n",
                record.topic(), record.offset(), record.value());
        System.out.print(line);
    }
}
程序启动后就可以在控制台看到消费到的消息了。