参考
https://blog.csdn.net/qq_21040559/article/details/122839376
https://blog.csdn.net/u011019141/article/details/108803361
https://segmentfault.com/a/1190000038909021
https://blog.csdn.net/supreme000/article/details/97794999
https://blog.csdn.net/yuanlong122716/article/details/105160545
导包&配置
pom.xml
/project/dependencies 新增
<!-- Spring for Apache Kafka starter. No <version> element here — presumably the
     version is managed by the Spring Boot parent BOM; verify against the project's
     parent/dependencyManagement section. -->
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
application.yaml
新增
spring:
  kafka:
    # Kafka broker list (host:port, comma separated).
    bootstrap-servers: whfc.cc:9092,whfc.cc:9093
    producer: # producer settings
      retries: 0 # number of retries on send failure
      acks: 1 # ack level: how many partition replicas must confirm before the broker acks (0, 1, all/-1)
      batch-size: 16384 # batch size in bytes
      buffer-memory: 33554432 # producer-side buffer size in bytes
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer: # consumer settings
      group-id: bank # default consumer group id
      # enable-auto-commit: true # whether to auto-commit offsets
      # auto-commit-interval: 100 # auto-commit delay (how long after receiving a message the offset is committed)
      # Where to start when there is no committed offset for the group.
      auto-offset-reset: latest
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
Producer
@Component
@Slf4j
// Producer (message sender): publishes the order id to the Kafka topic.
public class Producer {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Publishes the given order's id (as a String) to {@code ConstString.TOPIC}.
     * Note that only the id is sent, not the full order payload.
     *
     * @param orders the order to announce; its id must be non-null
     */
    public void sendRecord(Orders orders) {
        kafkaTemplate.send(ConstString.TOPIC, orders.getId().toString());
        // Bug fix: the message placeholder is "ordersId={}" but the whole Orders
        // object was being logged; log the id so the message matches the value.
        log.info("生成订单, ordersId={}", orders.getId());
    }
}
Consumer
@Component
// Consumer: listens on ConstString.TOPIC and manually acknowledges each record.
public class MyConsumer {

    /**
     * Handles one record from the topic and commits its offset asynchronously.
     *
     * <p>Manual commit is used here; alternatively enable {@code enable-auto-commit}
     * in the configuration, in which case the {@code consumer} parameter and the
     * explicit commit are not needed.
     *
     * <p>NOTE(review): {@code consumerRecord.value()} may be null (tombstone
     * records); guard before dereferencing it in the business logic.
     *
     * @param consumerRecord the received record
     * @param consumer       the underlying Kafka consumer, used for manual commit
     * @throws MyException          propagated from the business logic
     * @throws InterruptedException propagated from the business logic
     */
    @KafkaListener(topics = {ConstString.TOPIC})
    public void onMessage1(ConsumerRecord<?, ?> consumerRecord, Consumer<?, ?> consumer)
            throws MyException, InterruptedException {
        // 业务代码 (business logic goes here)

        // Manually commit the offset once processing succeeds.
        consumer.commitAsync();
    }
}