引入kafka相关依赖
<!-- Spring for Apache Kafka: provides KafkaTemplate and @KafkaListener support. -->
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>2.2.0.RELEASE</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
<!-- No <version>: assumes the version is managed (e.g. by the Spring Boot parent POM
     or spring-kafka's dependencyManagement) — TODO confirm; spring-kafka already pulls
     kafka-clients transitively, so this explicit entry may be redundant. -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
</dependency>
application.yml
spring:
  kafka:
    # Kafka broker addresses; multiple brokers may be listed comma-separated
    bootstrap-servers: 192.168.233.11:9092,192.168.233.11:9093,192.168.233.11:9094
    producer:
      retries: 0
      # upper bound (bytes) on a batch of records sent per partition
      batch-size: 16384
      # total memory (bytes) the producer may use to buffer records awaiting send
      buffer-memory: 33554432
      # serializers encode outgoing message keys and values
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      # default consumer group id
      group-id: test-group
      auto-offset-reset: earliest
      # manual offset commit: do not combine auto-commit with manual listener acks
      enable-auto-commit: false
      auto-commit-interval: 100
      # FIX: consumers need DEserializers (the original config wrongly used
      # key-serializer/value-serializer with StringSerializer, which the
      # consumer would fail to instantiate as a Deserializer)
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
    # NOTE(review): the consumer code uses Acknowledgment + batch (List) listening;
    # that additionally requires spring.kafka.listener.ack-mode: manual_immediate
    # and spring.kafka.listener.type: batch (or a batch-enabled container factory)
    # — confirm against the listener container configuration.

server:
  port: 8085
  servlet:
    context-path: /testKafka
生产者
package com.example.demo.test;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

/**
 * Kafka message producer.
 * Created by zhihuiyu on 2021/5/19.
 */
@Component
public class KafkaProducer {

    /**
     * FIX: was a raw {@code KafkaTemplate}; parameterized to match the configured
     * StringSerializer key. Values are declared Object here because {@link #send}
     * accepts Object — note the configured value-serializer is StringSerializer,
     * so non-String payloads will fail at serialization time (TODO confirm callers
     * only pass Strings).
     */
    @Autowired
    private KafkaTemplate<String, Object> kafkaTemplate;

    /**
     * Sends the given message 10 times to topic "test3", using the loop index
     * (as a String) as the record key so records spread across partitions.
     *
     * @param message payload to publish; sent unchanged on every iteration
     */
    public void send(Object message) {
        for (int i = 0; i < 10; i++) {
            kafkaTemplate.send("test3", i + "", message);
        }
    }
}
消费者
/**
 * Kafka message consumers.
 * Created by zhihuiyu on 2021/5/19.
 *
 * Notes (translated from the original comments):
 * - Different consumer groups consuming the same topic act as a broadcast:
 *   each group receives its own copy of every message.
 * - A topic has multiple partitions; within one group a partition is assigned
 *   to at most one consumer, while one consumer may own several partitions.
 */
@Slf4j
@Component
public class KafkaConsumer {
    // FIX: removed an unused @Autowired KafkaTemplate field — a consumer class
    // has no need for a producer template, and the injection was never used.

    /** Listener in the default group (spring.kafka.consumer.group-id). */
    @KafkaListener(topics = "test4")
    public void processMessage(String content) {
        log.info("consumer1 processMessage : {}", content);
    }

    /** Same topic, different group: receives its own copy of each message. */
    @KafkaListener(topics = "test4", groupId = "testGroup2")
    public void processMessage2(String content) {
        log.info("consumer2 processMessage : {}", content);
    }

    // To consume only specific partitions of a topic:
    // @KafkaListener(topics = "test3",
    //     topicPartitions = {@TopicPartition(topic = "test3", partitions = {"1"})})

    /** Three listeners in the same group share the partitions of "test3". */
    @KafkaListener(topics = "test3", groupId = "testGroup3")
    public void processMessage4(String content) {
        log.info("consumer4 processMessage : {}", content);
    }

    @KafkaListener(topics = "test3", groupId = "testGroup3")
    public void processMessage5(String content) {
        log.info("consumer5 processMessage : {}", content);
    }

    @KafkaListener(topics = "test3", groupId = "testGroup3")
    public void processMessage6(String content) {
        log.info("consumer6 processMessage : {}", content);
    }

    /** A listener may subscribe to several topics at once. */
    @KafkaListener(topics = {"test3", "test"}, groupId = "testGroup3")
    public void processMessage7(String content) {
        log.info("consumer7 processMessage : {}", content);
    }

    /**
     * Batch listener starting from offset 200 of partition 0 of "test5".
     *
     * FIX: renamed from processMessage5 — the original reused that name as an
     * overload of the String listener above, which is confusing for a framework-
     * invoked method.
     *
     * NOTE(review): receiving a List plus an Acknowledgment requires a container
     * factory with batch listening enabled and AckMode MANUAL/MANUAL_IMMEDIATE;
     * the yml shown only sets enable-auto-commit: false — confirm the listener
     * container configuration, otherwise this listener will not start.
     *
     * @param consumerRecordList batch of records polled from the assigned partition
     * @param acknowledgment     manual-ack handle; committed in finally so a failing
     *                           record cannot wedge the partition in a redelivery loop
     *                           (back up un-processable records in a catch block if
     *                           losing them is unacceptable)
     */
    @KafkaListener(topics = "test5", groupId = "testGroup5",
            topicPartitions = {@TopicPartition(topic = "test5",
                    partitionOffsets = {@PartitionOffset(partition = "0", initialOffset = "200")})})
    public void processBatchMessage(List<ConsumerRecord<String, Object>> consumerRecordList,
                                    Acknowledgment acknowledgment) {
        try {
            consumerRecordList.forEach(one -> {
                int partition = one.partition();
                String key = one.key();
                long offset = one.offset();
                String topic = one.topic();
                Object value = one.value();
                log.info("分区:{},key:{},偏移量:{},主题:{},内容:{}", partition, key, offset, topic, value);
            });
        } finally {
            // Commit regardless of success to avoid an error-driven redelivery loop.
            acknowledgment.acknowledge();
        }
    }
}
controller
/**
 * Kafka test endpoints.
 * Created by macro on 2021/5/19.
 */
@Api(tags = "KafkaController", description = "Kafka功能测试")
@Controller
@RequestMapping("/kafka")
public class KafkaController {

    @Autowired
    private KafkaProducer kafkaProducer;

    /**
     * Publishes the given message to Kafka (the producer fans it out 10 times
     * to topic "test3").
     *
     * FIX: the request parameter was declared as {@code Object}; an HTTP query
     * parameter is always text, so bind it as {@code String}. KafkaProducer.send
     * accepts Object, so the call site is unchanged.
     *
     * @param message text payload taken from the query string
     * @return success envelope with no body
     */
    @ApiOperation("发送消息")
    @RequestMapping(value = "/sendMessage", method = RequestMethod.GET)
    @ResponseBody
    public SelectResult sendMessage(@RequestParam String message) {
        kafkaProducer.send(message);
        return SelectResult.success(null);
    }
}