kafka简单消息处理与配置

文章详细解释了Kafka监听器中containerFactory的配置,包括批量和单条消费的自动提交与手动提交策略,以及它们的执行优先级。
摘要由CSDN通过智能技术生成

kafka的yml配置
在这里插入图片描述

kafka工厂配置

package com.djz.hand.config;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.listener.ContainerProperties;

import java.util.Map;

/**
 * Container factories for {@code @KafkaListener} beans.
 * Four variants are exposed: batch/auto-commit, batch/manual-commit,
 * single-record/auto-commit, single-record/manual-commit.
 *
 * Relevant settings: enable-auto-commit, type, ack-mode (every listener names its
 * containerFactory explicitly).
 * Config 1: true,single,record / false,single,record / true,batch,record / false,batch,record
 *   observed priority: singleManual > singleAuto > batchManual > batchAuto
 * <p>
 * Config 2: true,batch,manual / false,batch,manual / true,batch,manual_immediate
 *   observed priority: singleManual > singleAuto > batchManual > batchAuto
 * <p>
 * Config 3: false,batch,manual_immediate
 *   observed priority: singleManual > batchManual > singleAuto > batchAuto
 * <p>
 * Config 4: true,single,manual_immediate
 *   observed priority: singleAuto > batchManual > singleManual > batchAuto
 * <p>
 * Config 5: false,single,manual_immediate
 *   observed priority: singleManual > singleAuto > batchManual > batchAuto
 *
 * @author Administrator
 */
@Configuration
public class KafkaConsumeFactory {

    /** Base consumer configuration taken from spring.kafka.* in the yml. */
    @Autowired
    private KafkaProperties kafkaProperties;

    /**
     * Batch consumption, manual offset commit (MANUAL_IMMEDIATE: each
     * acknowledge() commits right away instead of at the end of the poll).
     */
    @Bean("batchManualContainerFactory")
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> batchManualContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = getContainer(true, false);
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        return factory;
    }

    /**
     * Batch consumption, auto commit (offsets committed by the Kafka client
     * itself; the container ack mode is irrelevant in this case).
     */
    @Bean("batchAutoContainerFactory")
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> batchAutoContainerFactory() {
        return getContainer(true, true);
    }

    /**
     * Single-record consumption, manual offset commit (MANUAL_IMMEDIATE).
     */
    @Bean("singleManualContainerFactory")
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> singleManualContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = getContainer(false, false);
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        return factory;
    }

    /**
     * Single-record consumption, auto commit.
     */
    @Bean("singleAutoContainerFactory")
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> singleAutoContainerFactory() {
        return getContainer(false, true);
    }

    /**
     * Builds a listener container factory with the shared settings.
     *
     * @param batch      true for batch listeners (List of records per invocation)
     * @param autoCommit value for the consumer's enable.auto.commit property
     */
    private ConcurrentKafkaListenerContainerFactory<String, String> getContainer(boolean batch, boolean autoCommit) {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory(autoCommit));
        factory.setAutoStartup(true);
        // KafkaProperties.Listener.getConcurrency() returns a nullable Integer;
        // unboxing it directly would NPE when spring.kafka.listener.concurrency
        // is not set in the yml. Fall back to the container default instead.
        Integer concurrency = kafkaProperties.getListener().getConcurrency();
        if (concurrency != null) {
            factory.setConcurrency(concurrency);
        }
        factory.getContainerProperties().setPollTimeout(1500);
        factory.setBatchListener(batch);
        return factory;
    }

    /**
     * Builds a consumer factory from the yml properties, overriding only
     * enable.auto.commit. Called once per container factory bean; each call
     * returns a fresh DefaultKafkaConsumerFactory (intentionally not a @Bean,
     * since the two commit modes need different configs).
     */
    public ConsumerFactory<String, String> consumerFactory(boolean autoCommit) {
        Map<String, Object> configs = kafkaProperties.buildConsumerProperties();
        configs.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, autoCommit);
        return new DefaultKafkaConsumerFactory<>(configs);
    }
}

kafka监听配置

package com.djz.hand.listen;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import java.util.List;

/**
 * Four listeners on the same topic, each wired to a different container
 * factory, used to observe which factory's consumer wins the partitions.
 *
 * @author dujiangzhou
 * @date 2023/8/24 18:02
 */
@Component
public class MessageListener {

    /**
     * Single-record consumption, auto commit.
     *
     * @param record the received record
     */
    @KafkaListener(topics = "testMessage", containerFactory = "singleAutoContainerFactory")
    public void onMessage(ConsumerRecord<String, String> record) {
        String id = record.value();
        System.out.println("1号机,已完成短信发送业务的自动单条消费,id:" + id);
    }

    /**
     * Single-record consumption, manual commit — acknowledged after processing.
     *
     * @param record the received record
     */
    @KafkaListener(topics = "testMessage", containerFactory = "singleManualContainerFactory")
    public void onMessage(ConsumerRecord<String, String> record, Acknowledgment ack) {
        String id = record.value();
        System.out.println("1号机分身,已完成短信发送业务的手动单条消费,id:" + id);
        ack.acknowledge();
    }

    /**
     * Batch consumption, manual commit — one acknowledge for the whole batch.
     *
     * @param records the received batch
     */
    @KafkaListener(topics = "testMessage", containerFactory = "batchManualContainerFactory")
    public void onMessage(List<ConsumerRecord<String, String>> records, Acknowledgment ack) {
        System.out.println("2号机,完成短信发送业务的手动批量消费中: " + records.size());
        records.forEach(r -> System.out.println("2号机,已完成短信发送业务手动的消费,id:" + r.value()));
        ack.acknowledge();
    }

    /**
     * Batch consumption, auto commit.
     *
     * @param records the received batch
     */
    @KafkaListener(topics = "testMessage", containerFactory = "batchAutoContainerFactory")
    public void onMessage(List<ConsumerRecord<String, String>> records) {
        System.out.println("2号机,完成短信发送业务的自动批量消费中: " + records.size());
        records.forEach(r -> System.out.println("2号机,已完成短信发送业务的自动消费,id:" + r.value()));
    }
}

kafka发送消息类

package com.djz.hand.service.impl;

import com.djz.hand.service.MessageService;
import org.apache.kafka.common.protocol.types.Field;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;


/**
 * Publishes a burst of test "SMS" messages to the {@code testMessage} topic.
 *
 * @author dujiangzhou
 * @date 2023/8/24 17:59
 */
@Service
public class MessageServiceImpl implements MessageService {

    @Resource
    private KafkaTemplate<String, String> kafkaTemplate;

    // NOTE(review): injected but never used in this class — confirm whether
    // another feature depends on it before removing.
    @Resource
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Sends 100 sequential ids (base+1 .. base+100) to topic "testMessage"
     * with key "talking".
     *
     * @param id numeric base id as a string
     * @throws NumberFormatException if {@code id} is not a parseable integer
     */
    @Override
    public void sendMessage(String id) {
        // Parse once; the value is loop-invariant (was re-parsed on every
        // iteration in the original).
        int base = Integer.parseInt(id);
        for (int i = 1; i <= 100; i++) {
            int data = base + i;
            System.out.println("待发送短信纳入处理队列中(kafka),id:" + data);
            kafkaTemplate.send("testMessage", "talking", String.valueOf(data));
            System.out.println("待发送短信纳入处理队列(kafka)成功,id:" + data);
        }
    }
}

结论

* kafka 监听器使用的containerFactory:
 * 包含以下类型:批量消费自动提交(默认)、批量消费手动提交、单条消费自动、单条消费手动
 * 参数:enable-auto-commit,type,ack-mode  (监听都带注解containerFactory)
 * 配置1:true,single, record 或 false,single, record 或 true,batch, record 或 false,batch, record
 * 执行优先级 singleManual>singleAuto>batchManual>batchAuto
 * <p>
 * 配置2:true,batch, manual 或 false,batch, manual 或 true,batch, manual_immediate
 * 执行优先级 singleManual>singleAuto>batchManual>batchAuto
 * <p>
 * 配置3:false,batch, manual_immediate
 * 执行优先级 singleManual>batchManual>singleAuto>batchAuto
 * <p>
 * 配置4:true,single, manual_immediate
 * 执行优先级 singleAuto>batchManual>singleManual>batchAuto
 * <p>
 * 配置5:false,single, manual_immediate
 * 执行优先级 singleManual>singleAuto>batchManual>batchAuto
  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
你可以使用librdkafka库来在C++中消费Kafka消息。以下是一个简单的示例代码: ```cpp #include <iostream> #include <cstdlib> #include <csignal> #include <librdkafka/rdkafkacpp.h> static bool running = true; // Kafka消息消费回调函数 class ExampleConsumeCb : public RdKafka::ConsumeCb { public: void consume_cb(RdKafka::Message &msg, void *opaque) { switch (msg.err()) { case RdKafka::ERR_NO_ERROR: // 处理接收到的消息 std::cout << "Received message: " << msg.payload() << std::endl; break; case RdKafka::ERR__TIMED_OUT: // 超时错误 break; default: // 其他错误 std::cerr << "Error occurred: " << msg.errstr() << std::endl; break; } } }; // Ctrl+C信号处理函数 static void sigterm(int sig) { running = false; } int main() { std::string brokers = "localhost:9092"; // Kafka broker地址 std::string topic = "test_topic"; // 要消费的Kafka主题 std::string errstr; RdKafka::Conf *conf = RdKafka::Conf::create(RdKafka::Conf::CONF_GLOBAL); conf->set("bootstrap.servers", brokers, errstr); RdKafka::Consumer *consumer = RdKafka::Consumer::create(conf, errstr); if (!consumer) { std::cerr << "Failed to create consumer: " << errstr << std::endl; exit(1); } // 订阅主题 RdKafka::ErrorCode err = consumer->subscribe({topic}); if (err) { std::cerr << "Failed to subscribe to topic: " << RdKafka::err2str(err) << std::endl; exit(1); } ExampleConsumeCb consume_cb; consumer->poll(0); // 初始化消费者 signal(SIGINT, sigterm); // 注册Ctrl+C信号处理函数 while (running) { // 消费消息 RdKafka::Message *msg = consumer->consume(1000); consume_cb.consume_cb(*msg, nullptr); delete msg; } // 关闭消费者 consumer->close(); delete consumer; // 销毁配置对象 delete conf; return 0; } ``` 上述代码通过librdkafka库创建一个Kafka消费者,并订阅指定的主题。在循环中,它会不断地消费消息,并使用`ExampleConsumeCb`类中定义的回调函数处理接收到的消息。通过注册Ctrl+C信号处理函数,你可以使用Ctrl+C来停止消费消息。 请注意,在使用此代码之前,你需要先安装librdkafka库,并将其链接到你的C++项目中。 希望对你有所帮助!如果有任何问题,请随时提问。
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值