spring-kafka批量消费

在使用kafka时,单条消费和提交有时候会影响性能。spring-kafka提供了批量拉取数据和手动提交的策略。

代码如下:

创建一个生产者:

package test.spring.kafka.producer;

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
import test.spring.kafka.service.KafkaMessage;

import javax.annotation.Resource;
import java.util.UUID;


@Component
public class KafkaProducer {

    @Resource
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Publishes ten demo messages to topic "topic2", each carrying a
     * {@code KafkaMessage} payload serialized to JSON.
     */
    public void sendProducerRecord(){
        for (int index = 0; index < 10; index++) {
            kafkaTemplate.send(buildRecord(index));
        }
    }

    /** Builds one record for "topic2"/"key1" whose value is the JSON form of a KafkaMessage. */
    private ProducerRecord<String, String> buildRecord(int index) {
        KafkaMessage message = new KafkaMessage();
        message.setIndex(index);
        message.setId(UUID.randomUUID().toString());
        message.setValue("producerRecord " + index);
        return new ProducerRecord<>("topic2", "key1", JSONObject.toJSONString(message));
    }

}

 

创建一个消费者:

 

package test.spring.kafka.consumer;

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import java.util.List;


@Component
public class KafkaConsumer {

    /**
     * Batch listener for topic "topic2": receives all records of one poll as a
     * single list (listener type {@code batch}) and commits their offsets
     * manually once per batch ({@code ack-mode: manual_immediate}).
     *
     * @param consumerRecords the batch of records delivered by one poll
     * @param acknowledgment  handle used to manually commit the batch's offsets
     */
    @KafkaListener(topics = "topic2")
    public void receiverProducerRecord(List<ConsumerRecord<String, String>> consumerRecords, Acknowledgment acknowledgment){
        System.out.println(consumerRecords.size());
        for(ConsumerRecord<String, String> consumerRecord : consumerRecords) {
            System.out.println("receiverProducerRecord key is " + JSONObject.toJSONString(consumerRecord.key()));
            System.out.println("receiverProducerRecord value is " + JSONObject.toJSONString(consumerRecord.value()));
        }
        // 手动提交offset — commit once AFTER the loop: in batch mode a single
        // acknowledge() commits the offsets of the whole batch, so calling it
        // inside the loop (as the original did) repeatedly re-committed the
        // same batch on every record.
        acknowledgment.acknowledge();
    }
}

 

配置文件:

server:
  port: 8083

spring:
  application:
    name: test-spring-kafka
  kafka:
    bootstrap-servers: 127.0.0.1:9092
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      auto-commit-interval: 100
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: test-group-id
      max-poll-records: 20 # 批次拉取数据的量
      # 手动提交offset
      enable-auto-commit: false
    listener:
      ack-mode: manual_immediate # 手动
      type: batch  # 批量消费



在配置文件中关闭自动提交(enable-auto-commit: false),并将 listener 的 ack-mode 设为 manual_immediate、type 设为 batch,即可实现批量消费;每处理完一批消息后,需要调用 acknowledge() 手动提交该批次的 offset。

评论 5
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值