Kafka consumer sample code — just adjust the configuration parameters below (broker address, consumer group, topic) and it can be used as-is; everything else stays unchanged.

package com.qsdi.kafka;


import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

@Configuration
@EnableKafka
public class KafkaConsumerConfiguration {
    /** Class logger; the former duplicate private {@code logger} field was removed. */
    protected static final Logger LOGGER = LoggerFactory.getLogger(KafkaConsumerConfiguration.class);
    // Kafka bootstrap servers (broker address)
    private static String consumerServer = "192.168.1.114:9092";
    // Consumer group id (now actually used in consumerProps(); it was previously
    // shadowed by a hard-coded "HK_GROUP" value, leaving this field dead)
    private static String groupId = "rctool";
    // Listener switch: "on" enables auto-startup of the listener containers
    private static String isOpenListener = "on";
    // Payload type: "0" = String, "1" = byte[]
    private static String kafkaDataType = "0";

    /**
     * Builds the listener container factory used by {@code @KafkaListener} methods.
     *
     * @return a factory with 2 concurrent consumers and a 1.5 s poll timeout;
     *         auto-startup is enabled only when {@code isOpenListener} equals "on"
     */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        // Constant-first equals is null-safe and replaces the previous if/else pair.
        factory.setAutoStartup("on".equals(isOpenListener));
        factory.setConsumerFactory(consumerFactory());
        factory.setConcurrency(2);
        factory.getContainerProperties().setPollTimeout(1500);
        LOGGER.info("------------------------kafka 消费者-----------------------");
        return factory;
    }

    /**
     * Consumer factory built from {@link #consumerProps()}.
     */
    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerProps());
    }

    /**
     * Raw consumer configuration map.
     *
     * @return properties for the Kafka consumer: connection, group id,
     *         auto-commit behavior, session timeout, deserializers, offset reset
     */
    private Map<String, Object> consumerProps() {
        Map<String, Object> props = new HashMap<>();
        // Broker connection address
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, consumerServer);
        // Fix: use the configured groupId field instead of the hard-coded
        // "HK_GROUP" literal that made the field above dead code.
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        // Offsets are committed automatically...
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
        // ...every 100 ms
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");
        // Session timeout for group-membership heartbeating
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
        // kafkaDataType "1" = byte[] deserialization, anything else = String.
        // NOTE(review): the factory above is typed <String, String>, so the
        // ByteArrayDeserializer branch produces records whose runtime payload
        // type does not match the declared generics — confirm intended usage.
        if (kafkaDataType.equals("1")) {
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
        } else {
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        }
        // Start from the latest offset when no committed offset exists
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        return props;
    }

    /**
     * Listener bean.
     * NOTE(review): KafkaConsumerListerner is also annotated @Component, so
     * registering it here as well may create two bean instances (and duplicate
     * consumption) — confirm whether this @Bean or the @Component should be removed.
     */
    @Bean
    public KafkaConsumerListerner listener() {
        return new KafkaConsumerListerner();
    }

}


package com.qsdi.kafka;

import com.qsdi.service.impl.HKTaskServiceImpl;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;


@Component
public class KafkaConsumerListerner {
    // SLF4J logger for this listener (class name keeps its original spelling,
    // since it is referenced from the configuration class).
    private static final Logger log = LoggerFactory.getLogger(KafkaConsumerListerner.class);

    @Autowired
    HKTaskServiceImpl hkTaskService;

    /**
     * Receives each message from the "HIKVISIO_FTP" topic and forwards the
     * record to the HK task service for processing.
     *
     * @param consumerRecord the incoming Kafka record (key and value as String)
     */
    @KafkaListener(topics = "HIKVISIO_FTP")
    public void yishaVehicleListenerByByte(ConsumerRecord<String, String> consumerRecord) {
        hkTaskService.getKafkaData(consumerRecord);
    }

}


Required Maven dependency:

<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值