Integrating Kafka 2.13 with Spring Boot 2.1.4
Creating the Spring Boot project
Spring Boot 2.1.4 is used here. Different Kafka versions have requirements on the Spring Boot version, so check compatibility before use. (The "2.13" in the Kafka download name, e.g. kafka_2.13-x.x.x, is the Scala version the broker was built with, not the Kafka version itself.)
The project directory structure is shown below.
The role of each class:
Class | Description |
---|---|
BatchFactoryBean.java | Enables batch processing of the messages in the queue |
KafkaController.java | Controller that receives front-end requests |
TrainingInfo.java | Entity class that is written into the message queue |
KafkaProducer.java | Produces messages |
KafkaReceiver.java | Consumes messages; consumption here is driven by a schedule |
ResultJSON.java | Wrapper object returned by the Spring Boot endpoints |
application-dev.yml | Configuration file |
pom.xml | Maven dependency configuration |
Import the required dependencies
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.1.4.RELEASE</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>com.kafka.demo</groupId>
<artifactId>kafka_demo</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>demo</name>
<description>Demo project for Spring Boot</description>
<properties>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-thymeleaf</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-devtools</artifactId>
<scope>runtime</scope>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
        <!-- Kafka integration -->
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>2.2.0.RELEASE</version>
</dependency>
        <!-- fastjson for JSON conversion -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.62</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
Beyond the standard starters, only two extra dependencies are needed: spring-kafka and fastjson.
Configure Kafka in application-dev.yml
server:
port: 8081
spring:
  #------------------------------------- kafka config start ----------------------------------
  kafka:
    # Kafka broker addresses; for a cluster, list multiple addresses separated by commas
    bootstrap-servers: 192.168.5.39:9092,192.168.5.40:9092,192.168.5.41:9092
    #=============== producer =======================
    producer:
      retries: 0
      # upper bound, in bytes, on the size of each batched send
      batch-size: 16384
      # total memory, in bytes, available to the producer for buffering
      buffer-memory: 33554432
      # serializers for the message key and value
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    #=============== consumer =======================
    consumer:
      # default consumer group id (see consumer.properties below)
      group-id: test-consumer-group
      # commit offsets manually instead of automatically
      enable-auto-commit: false
      # maximum number of records returned by a single poll
      max-poll-records: 100
      auto-offset-reset: latest
      # deserializers for the message key and value
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
    listener:
      # manual acknowledgment, matching enable-auto-commit: false
      ack-mode: manual
      # one processing thread per listener
      concurrency: 1
  #------------------------------------- kafka config end ----------------------------------
Change group-id to match your own Kafka setup. To check it, go into the directory where Kafka was unpacked and look at config/consumer.properties:
cd config
vi consumer.properties
There you will see the group.id value (a stock Kafka install ships with group.id=test-consumer-group, which is what this demo uses).
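If you want to confirm the broker addresses are reachable before wiring everything through Spring, a plain kafka-clients consumer can poll the topic once. A minimal smoke-test sketch; the class name and group id are hypothetical, and the values mirror application-dev.yml:

package com.kafka.demo.kafka_demo; // hypothetical helper, not part of the original project

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class TopicSmokeTest {
    public static void main(String[] args) {
        Properties props = new Properties();
        // same values as application-dev.yml
        props.put("bootstrap.servers", "192.168.5.39:9092,192.168.5.40:9092,192.168.5.41:9092");
        props.put("group.id", "smoke-test"); // throwaway group so real offsets are untouched
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("start_kafka"));
            // a single poll is enough to verify connectivity
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
            System.out.println("fetched " + records.count() + " record(s)");
        }
    }
}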
Configure a container factory for batch processing with manual acknowledgment
package com.kafka.demo.kafka_demo.config; // package assumed; adjust to your project layout

import org.springframework.boot.autoconfigure.kafka.ConcurrentKafkaListenerContainerFactoryConfigurer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;

@Configuration
public class BatchFactoryBean {
    /**
     * Kafka listener container factory for batch consumption.
     * @param configurer applies the spring.kafka.listener.* settings from application-dev.yml
     * @param consumerFactory the auto-configured consumer factory
     * @return the factory referenced by @KafkaListener(containerFactory = "batchFactory")
     */
    @Bean("batchFactory")
    public ConcurrentKafkaListenerContainerFactory<Object, Object> kafkaListenerContainerFactory(
            ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
            ConsumerFactory<Object, Object> consumerFactory) {
        ConcurrentKafkaListenerContainerFactory<Object, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory);
        // enable batch consumption: the listener receives a List of records per invocation
        factory.setBatchListener(true);
        // do not start on application startup; the scheduled job in KafkaReceiver starts and pauses the container
        factory.setAutoStartup(false);
        configurer.configure(factory, consumerFactory);
        return factory;
    }
}
Create the utility class, entity class, and controller
Utility class ResultJSON.java
package com.kafka.demo.kafka_demo.utils;
import org.springframework.util.StringUtils;
public class ResultJSON {
    // status code: 200 success, 500 failure, 202 login expired
    private String code;
    // message
    private String msg;
    // response payload
    private Object responseData;
    private int currentPage; // current page number
    private int pageSize; // records per page
    private int recordCount; // total number of records
    // computed from recordCount and pageSize
    private int pageCount; // total number of pages
    // success, no payload
    public static ResultJSON success() {
        return new ResultJSON("200", "operation successful", null);
    }
    // success, with payload
    public static ResultJSON success(Object responseData) {
        return new ResultJSON("200", "operation successful", responseData);
    }
    // success, with paginated payload
    public static ResultJSON success(int currentPage, int pageSize, int recordCount, Object responseData) {
        return new ResultJSON("200", "operation successful", responseData, currentPage, pageSize, recordCount);
    }
    // error: used when the code throws an exception
    public static ResultJSON error(String string) {
        ResultJSON result = new ResultJSON("500", string, null);
        if (StringUtils.isEmpty(string)) {
            result.setMsg("operation failed");
        }
        return result;
    }
    // custom status and payload
public ResultJSON(String code, String msg, Object responseData) {
this.code = code;
this.msg = msg;
this.responseData = responseData;
}
    // custom status and paginated payload
public ResultJSON(String code, String msg, Object responseData, int currentPage, int pageSize, int recordCount) {
this.currentPage = currentPage;
this.pageSize = pageSize;
this.recordCount = recordCount;
this.code = code;
this.msg = msg;
this.responseData = responseData;
}
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code;
}
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
public Object getResponseData() {
return responseData;
}
public void setResponseData(Object responseData) {
this.responseData = responseData;
}
public int getCurrentPage() {
return currentPage;
}
public void setCurrentPage(int currentPage) {
this.currentPage = currentPage;
}
public int getPageSize() {
return pageSize;
}
public void setPageSize(int pageSize) {
this.pageSize = pageSize;
}
public int getRecordCount() {
return recordCount;
}
public void setRecordCount(int recordCount) {
this.recordCount = recordCount;
}
    public int getPageCount() {
        // guard against pageSize == 0 to avoid dividing by zero
        if (this.recordCount > 0 && this.pageSize > 0) {
            // ceiling division, e.g. 101 records at 20 per page -> 6 pages
            if (this.recordCount % this.pageSize == 0) {
                this.pageCount = this.recordCount / this.pageSize;
            } else {
                this.pageCount = this.recordCount / this.pageSize + 1;
            }
            return pageCount;
        }
        return 0;
    }
public void setPageCount(int pageCount) {
this.pageCount = pageCount;
}
}
Entity class TrainingInfo.java
package com.kafka.demo.kafka_demo.entity;
import java.util.Date;
public class TrainingInfo {
private Integer id;
private Integer trainContent;
private Integer algoId;
private Integer status;
private Date beginTime;
private Date endTime;
private Date createTime;
private Date deleteTime;
private Integer createPerson;
private Integer isAvailable;
    // current page number
    private int currentPage;
    // offset of the first record on the page
    private int offset;
    // number of records per page
    private int pageSize;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Integer getTrainContent() {
return trainContent;
}
public void setTrainContent(Integer trainContent) {
this.trainContent = trainContent;
}
public Integer getAlgoId() {
return algoId;
}
public void setAlgoId(Integer algoId) {
this.algoId = algoId;
}
public Integer getStatus() {
return status;
}
public void setStatus(Integer status) {
this.status = status;
}
public Date getBeginTime() {
return beginTime;
}
public void setBeginTime(Date beginTime) {
this.beginTime = beginTime;
}
public Date getEndTime() {
return endTime;
}
public void setEndTime(Date endTime) {
this.endTime = endTime;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public Date getDeleteTime() {
return deleteTime;
}
public void setDeleteTime(Date deleteTime) {
this.deleteTime = deleteTime;
}
public Integer getCreatePerson() {
return createPerson;
}
public void setCreatePerson(Integer createPerson) {
this.createPerson = createPerson;
}
public Integer getIsAvailable() {
return isAvailable;
}
public void setIsAvailable(Integer isAvailable) {
this.isAvailable = isAvailable;
}
public int getCurrentPage() {
return currentPage;
}
public void setCurrentPage(int currentPage) {
this.currentPage = currentPage;
}
    public int getOffset() {
        // computed from the page number, e.g. page 3 with pageSize 10 -> offset 20
        return (currentPage - 1) * pageSize;
    }
public void setOffset(int offset) {
this.offset = offset;
}
public int getPageSize() {
return pageSize;
}
public void setPageSize(int pageSize) {
this.pageSize = pageSize;
}
}
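Since the producer below serializes this entity with fastjson, it can help to see what actually lands on the topic. A minimal sketch; the class and the field values are hypothetical, for illustration only:

package com.kafka.demo.kafka_demo.entity; // hypothetical scratch class, not part of the project

import com.alibaba.fastjson.JSON;

public class TrainingInfoJsonDemo {
    public static void main(String[] args) {
        TrainingInfo info = new TrainingInfo();
        info.setId(1);      // the consumer later reads this "id" field back out
        info.setAlgoId(2);
        // fastjson omits null fields by default, so unset wrapper fields disappear
        System.out.println(JSON.toJSONString(info));
        // prints something like: {"algoId":2,"currentPage":0,"id":1,"offset":0,"pageSize":0}
    }
}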
KafkaController.java
package com.kafka.demo.kafka_demo.controller;
import com.kafka.demo.kafka_demo.entity.TrainingInfo;
import com.kafka.demo.kafka_demo.producer.KafkaProducer;
import com.kafka.demo.kafka_demo.utils.ResultJSON;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@RestController
@RequestMapping("/kafka")
public class KafkaController {
@Autowired
KafkaProducer kafkaProducer;
    /**
     * Add a new task (2020-04-20): the task is written to the Kafka message queue.
     *
     * @param trainingInfo the task, bound from the request parameters
     * @return a success wrapper
     */
@RequestMapping("/addMessage")
public ResultJSON addMessage(TrainingInfo trainingInfo) {
        // store the task in the Kafka message queue
kafkaProducer.addMessage(trainingInfo);
return ResultJSON.success();
}
}
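The endpoint can be exercised with any HTTP client, not just Postman. A minimal sketch using Spring's RestTemplate; the class name and field values are hypothetical, and the port comes from application-dev.yml:

package com.kafka.demo.kafka_demo; // hypothetical helper, for illustration only

import org.springframework.web.client.RestTemplate;

public class AddMessageClient {
    public static void main(String[] args) {
        // the query parameters bind to the TrainingInfo fields of addMessage()
        String url = "http://localhost:8081/kafka/addMessage?id=1&algoId=2&status=0";
        String body = new RestTemplate().getForObject(url, String.class);
        System.out.println(body); // expect code 200 in the ResultJSON wrapper
    }
}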
Create the producer and the scheduled consumer
Create the producer
package com.kafka.demo.kafka_demo.producer;
import com.alibaba.fastjson.JSON;
import com.kafka.demo.kafka_demo.entity.TrainingInfo;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
@Component
public class KafkaProducer {
@Autowired
private KafkaTemplate<String, Object> kafkaTemplate;
    // send a task-start message to the topic
public void addMessage(TrainingInfo trainingInfo) {
String jsonStrTraining = JSON.toJSONString(trainingInfo);
System.out.println(jsonStrTraining);
kafkaTemplate.send("start_kafka", jsonStrTraining);
}
}
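In spring-kafka 2.2, KafkaTemplate.send() returns a ListenableFuture, so delivery success or failure can be observed instead of fired and forgotten. A sketch of an optional extension to the producer above; this callback variant is not part of the original code:

package com.kafka.demo.kafka_demo.producer; // sketch of an optional extension, not the original class

import com.alibaba.fastjson.JSON;
import com.kafka.demo.kafka_demo.entity.TrainingInfo;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;

@Component
public class CallbackKafkaProducer {
    @Autowired
    private KafkaTemplate<String, Object> kafkaTemplate;

    public void addMessage(TrainingInfo trainingInfo) {
        String json = JSON.toJSONString(trainingInfo);
        // send() returns a future; register a callback to log the delivery outcome
        ListenableFuture<SendResult<String, Object>> future = kafkaTemplate.send("start_kafka", json);
        future.addCallback(new ListenableFutureCallback<SendResult<String, Object>>() {
            @Override
            public void onSuccess(SendResult<String, Object> result) {
                System.out.println("sent to partition " + result.getRecordMetadata().partition()
                        + " at offset " + result.getRecordMetadata().offset());
            }
            @Override
            public void onFailure(Throwable ex) {
                System.err.println("send failed: " + ex.getMessage());
            }
        });
    }
}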
Create the consumer
package com.kafka.demo.kafka_demo.receiver;

import java.util.List;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
import org.springframework.kafka.listener.MessageListenerContainer;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;

@Service
public class KafkaReceiver {
    @Autowired
    private KafkaListenerEndpointRegistry registry;
    private static Logger logger = LoggerFactory.getLogger(KafkaReceiver.class);

    /**
     * <task start> listener, driven by the scheduled method below.
     *
     * @param recordList     the batch of records returned by one poll
     * @param acknowledgment manual ack handle (ack-mode: manual)
     */
    // note: groupId here overrides spring.kafka.consumer.group-id from the yml
    @KafkaListener(id = "start_kafka", topics = {
            "start_kafka" }, groupId = "kafka-group", containerFactory = "batchFactory")
    public void listenTaskStart(List<ConsumerRecord<String, String>> recordList, Acknowledgment acknowledgment) {
        for (ConsumerRecord<String, String> record : recordList) {
            JSONObject jsonObject = JSON.parseObject(record.value());
            int id = jsonObject.getInteger("id");
            logger.info("scheduled fetch succeeded");
            logger.info("fetched id ==========> " + id);
        }
        // commit the offsets for the whole batch
        acknowledgment.acknowledge();
    }

    // <task start>: consume once per minute
    @Scheduled(cron = "0 * * * * ?")
    public void taskStartListener() {
        logger.info("opening the <task start> listener");
        MessageListenerContainer container = registry.getListenerContainer("start_kafka");
        if (!container.isRunning()) {
            // first run: the container was created with autoStartup(false), so start it
            container.start();
        }
        // resume consuming
        container.resume();
        try {
            // keep consuming for 10 seconds; note this blocks the scheduler thread
            Thread.sleep(10 * 1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        logger.info("closing the <task start> listener");
        // pause consuming until the next scheduled run
        container.pause();
    }
}
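One thing the listing above depends on but does not show: @Scheduled methods only fire when scheduling is enabled on a configuration class. A minimal sketch of the application class; the class name is assumed, since the original main class is not shown:

package com.kafka.demo.kafka_demo; // class name assumed

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;

@SpringBootApplication
@EnableScheduling // without this, taskStartListener() above never runs
public class KafkaDemoApplication {
    public static void main(String[] args) {
        SpringApplication.run(KafkaDemoApplication.class, args);
    }
}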
Start the project and test it with Postman
Before any request has been sent, the scheduled task finds the topic empty: the listener opens, sees no data, and is closed ten seconds later.
After the listener has been paused, send a request with Postman (for example GET http://localhost:8081/kafka/addMessage?id=1) and the producer's output appears as the last line of the log.
Send the request with Postman.
The next time the scheduled task runs, it picks up the message that was just sent and consumes it.
Once the batch has been acknowledged, the consumer group's committed offset has moved past the message, so every later scheduled poll of this topic comes back empty. Note that consuming does not delete the message from Kafka; it remains on the broker until the retention period expires, it simply will not be redelivered to this group.
Summary
I was feeling my way through this Kafka and Spring Boot integration myself, so please bear with any shortcomings; if you spot a mistake, corrections are welcome!