Spring Boot Kafka Integration


Producer

1. Add the required dependencies to the pom file

<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>fastjson</artifactId>
    <version>1.2.47</version>
</dependency>

<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-test</artifactId>
    <scope>test</scope>
</dependency>
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka-test</artifactId>
    <scope>test</scope>
</dependency>

2. application.properties configuration

spring.kafka.producer.bootstrap-servers=192.168.62.131:9092,192.168.62.132:9092,192.168.62.133:9092
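
Only the broker list is strictly required here, because Spring Boot defaults the producer key and value serializers to StringSerializer, which matches the JSON string sent below. A slightly fuller sketch of the producer properties (the acks/retries/batch-size values are illustrative assumptions, not part of the original setup):

spring.kafka.producer.bootstrap-servers=192.168.62.131:9092,192.168.62.132:9092,192.168.62.133:9092
# explicit serializers (these are already the Spring Boot defaults)
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
# optional tuning, example values only
spring.kafka.producer.acks=1
spring.kafka.producer.retries=3
spring.kafka.producer.batch-size=16384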

3. Message sender code

Entity class:
import java.io.Serializable;

public class KafkaTestEntity implements Serializable {
    private static final long serialVersionUID = 8686191662900968929L;
    private String requestIP;
    private String requestAddr;
    private String requestTime;
    private String requestId;
    private String operationNum;
    private String status;
    private String identification;
    private String userCode;
    private String departCode;
    private String operationType;
    private String operationDetail;

    public String getRequestIP() {
        return requestIP;
    }

    public void setRequestIP(String requestIP) {
        this.requestIP = requestIP;
    }

    public String getRequestAddr() {
        return requestAddr;
    }

    public void setRequestAddr(String requestAddr) {
        this.requestAddr = requestAddr;
    }

    public String getRequestTime() {
        return requestTime;
    }

    public void setRequestTime(String requestTime) {
        this.requestTime = requestTime;
    }

    public String getRequestId() {
        return requestId;
    }

    public void setRequestId(String requestId) {
        this.requestId = requestId;
    }

    public String getOperationNum() {
        return operationNum;
    }

    public void setOperationNum(String operationNum) {
        this.operationNum = operationNum;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getIdentification() {
        return identification;
    }

    public void setIdentification(String identification) {
        this.identification = identification;
    }

    public String getUserCode() {
        return userCode;
    }

    public void setUserCode(String userCode) {
        this.userCode = userCode;
    }

    public String getDepartCode() {
        return departCode;
    }

    public void setDepartCode(String departCode) {
        this.departCode = departCode;
    }

    public String getOperationType() {
        return operationType;
    }

    public void setOperationType(String operationType) {
        this.operationType = operationType;
    }

    public String getOperationDetail() {
        return operationDetail;
    }

    public void setOperationDetail(String operationDetail) {
        this.operationDetail = operationDetail;
    }
}
Message sending code:
import java.util.UUID;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;

import com.alibaba.fastjson.JSON;

@Component
@EnableScheduling
public class KafkaProducer {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Scheduled task: sends a test message every 5 seconds.
     */
    @Scheduled(cron = "0/5 * * * * ?")
    public void send() {
        KafkaTestEntity kafkaTestEntity = new KafkaTestEntity();
        kafkaTestEntity.setDepartCode("10030");
        // unique business identifier
        kafkaTestEntity.setIdentification(UUID.randomUUID().toString());
        kafkaTestEntity.setOperationDetail("11111");
        // operation step: random number between 1 and 10
        kafkaTestEntity.setOperationNum(((int) (1 + Math.random() * 10)) + "");
        kafkaTestEntity.setOperationType("1");
        kafkaTestEntity.setRequestAddr("/kafkatest");
        // unique request ID
        kafkaTestEntity.setRequestId(UUID.randomUUID().toString());
        kafkaTestEntity.setRequestTime("2019-03-07 10:12:00");
        kafkaTestEntity.setUserCode("10003");
        kafkaTestEntity.setStatus("OK");
        kafkaTestEntity.setRequestIP("192.168.0.132");

        String message = JSON.toJSONString(kafkaTestEntity);

        ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send("logtest", message);
        future.addCallback(
                result -> System.out.println("send - message sent successfully: " + message),
                throwable -> System.out.println("message send failed: " + message));
    }
}
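
The scheduled sender above assumes the logtest topic already exists (or that the brokers allow automatic topic creation). With spring-kafka on the classpath, Spring Boot auto-configures a KafkaAdmin, so the topic can also be declared as a bean and created at startup; a minimal sketch, with illustrative partition and replication counts (note that the admin client reads the common spring.kafka.bootstrap-servers property rather than the producer-specific one):

import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class KafkaTopicConfig {

    /**
     * Creates the logtest topic on startup if it does not already exist.
     * The partition count and replication factor are example values.
     */
    @Bean
    public NewTopic logtestTopic() {
        return new NewTopic("logtest", 3, (short) 3);
    }
}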

Consumer

1. pom file

<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>fastjson</artifactId>
    <version>1.2.47</version>
</dependency>

<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-test</artifactId>
    <scope>test</scope>
</dependency>
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka-test</artifactId>
    <scope>test</scope>
</dependency>

2. application.properties configuration

spring.kafka.consumer.auto-offset-reset=latest
spring.kafka.consumer.enable-auto-commit=true
# the consumer group could be configured globally here, but this example sets it on the @KafkaListener annotation (test-consume-group)
#spring.kafka.consumer.group-id=testgroup2
spring.kafka.bootstrap-servers=192.168.62.131:9092,192.168.62.132:9092,192.168.62.133:9092

spring.kafka.consumer.auto-offset-reset accepts three values:

1. earliest
If a partition already has a committed offset, consumption starts from that offset; if there is no committed offset, consumption starts from the beginning of the partition.
2. latest
If a partition already has a committed offset, consumption starts from that offset; if there is no committed offset, only records produced after the consumer starts are consumed.
3. none
If every partition of the topic has a committed offset, consumption starts from those offsets; if any partition has no committed offset, an exception is thrown.
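
For example, to replay everything already in the topic, you could point a brand-new consumer group at it and switch the reset policy to earliest (the group name below is purely illustrative):

spring.kafka.consumer.group-id=replay-group
spring.kafka.consumer.auto-offset-reset=earliest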

3. Consume messages

import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
public class ConsumerService {

    @KafkaListener(topics = {"logtest"}, groupId = "test-consume-group")
    public void consumer(String message) {
        // logger.info("test topic message : {}", message);
        System.out.println("app_log -- consumed message: " + message);
        // KafkaTestEntity kafkaTestEntity = JSON.parseObject(message, KafkaTestEntity.class);
    }
}
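
The listener receives the raw JSON string produced above; if the typed object is needed, the commented-out fastjson call can be brought back. A minimal sketch of the same listener doing the deserialization (the fields printed are arbitrary examples):

import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import com.alibaba.fastjson.JSON;

@Component
public class ConsumerService {

    @KafkaListener(topics = {"logtest"}, groupId = "test-consume-group")
    public void consumer(String message) {
        // deserialize the JSON payload back into the producer-side entity
        KafkaTestEntity entity = JSON.parseObject(message, KafkaTestEntity.class);
        System.out.println("app_log -- consumed message: userCode=" + entity.getUserCode()
                + ", requestId=" + entity.getRequestId());
    }
}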