Connecting to Alibaba Cloud Kafka over SASL_SSL

Configuration classes

package com.cheche365.dictonary.datatrans.datatrans.config;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;

import java.util.Map;

public abstract class BaseKafkaProperties {

    /**
     * Adds the SASL_SSL settings required by an Alibaba Cloud Kafka instance to an existing
     * client property map (usable for both consumer and producer properties).
     */
    public static void sslConnectConfig(Map<String, Object> props, String username, String password) {
        // SASL over TLS with the PLAIN mechanism
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
        props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
        // Truststore (kafka.client.truststore.jks) downloaded from Alibaba Cloud; adjust the path to your environment
        props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "D:\\WorkSpace\\brf-server\\pushData\\kafka.client.truststore.jks");
        props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "KafkaOnsClient");
        // Disable hostname verification of the broker certificate
        props.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
        // JAAS login module carrying the instance's SASL username and password
        String jaasTemplate = "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"%s\" password=\"%s\";";
        String jaasCfg = String.format(jaasTemplate, username, password);
        props.put(SaslConfigs.SASL_JAAS_CONFIG, jaasCfg);
    }
}
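
Before wiring the helper into Spring, it can be handy to verify the SASL_SSL settings with a plain Kafka consumer that reuses sslConnectConfig. The sketch below is illustrative and not part of the original setup: the class name, bootstrap endpoint, group id and credentials are placeholders, and the truststore path inside sslConnectConfig must exist on the machine running it.

package com.cheche365.dictonary.datatrans.datatrans.config;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class SaslSslSmokeTest {

    public static void main(String[] args) {
        Map<String, Object> props = new HashMap<>();
        // Placeholder endpoint; use the SSL endpoint shown in the Alibaba Cloud Kafka console
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "alikafka-xxx.aliyuncs.com:9093");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "sasl-ssl-smoke-test");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        // Layer the SASL_SSL settings on top, exactly as the Spring configuration does
        BaseKafkaProperties.sslConnectConfig(props, "yourUsername", "yourPassword");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("brf-sync-bedrock-1-dev"));
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("offset=%d value=%s%n", record.offset(), record.value());
            }
        }
    }
}
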
Consumer configuration and batch listener container factory

package com.cheche365.dictonary.datatrans.datatrans.config;

import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

import java.util.Map;

/**
 * @author sunyan
 * @date 2022/11/15 11:16
 * @description Builds the SASL_SSL consumer properties and the batch listener container factory
 */
@Configuration
@Slf4j
public class DefaultConfig {
    @Value("${spring.kafka.consumer.username}")
    private String userName;

    @Value("${spring.kafka.consumer.password}")
    private String password;

    @Autowired
    private KafkaProperties properties;

    Map<String, Object> consumerConfig() {
        // Start from the spring.kafka.* consumer properties and layer the SASL_SSL settings on top
        Map<String, Object> consumerProperties = this.properties.buildConsumerProperties();
        BaseKafkaProperties.sslConnectConfig(consumerProperties, userName, password);
        return consumerProperties;
    }

    @Bean("batchContainerFactory")
    public ConcurrentKafkaListenerContainerFactory containerFactory(){
        ConcurrentKafkaListenerContainerFactory container = new ConcurrentKafkaListenerContainerFactory();
        container.setConsumerFactory(new DefaultKafkaConsumerFactory(consumerConfig()));

        //设置并发量,小于或等于Topic的分区数
        container.setConcurrency(5);
        //设置为批量监听
        container.setBatchListener(true);
        return container;
    }
}
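
The acks=all setting applies to producers rather than consumers, so it is not part of consumerConfig above. If the same application also publishes to the instance, the helper can be reused on the producer side. This is a minimal sketch, not part of the original post: the class and bean names are illustrative, and it assumes the producer uses the same SASL credentials as the consumer (adjust the @Value placeholders if a separate user is configured).

package com.cheche365.dictonary.datatrans.datatrans.config;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.Map;

@Configuration
public class ProducerSslConfig {

    // Assumes the producer shares the consumer's SASL user; change the placeholders if it does not
    @Value("${spring.kafka.consumer.username}")
    private String userName;

    @Value("${spring.kafka.consumer.password}")
    private String password;

    private final KafkaProperties properties;

    public ProducerSslConfig(KafkaProperties properties) {
        this.properties = properties;
    }

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> props = this.properties.buildProducerProperties();
        // acks=all is a producer-side setting: wait for the full ISR before acknowledging a send
        props.put(ProducerConfig.ACKS_CONFIG, "all");
        BaseKafkaProperties.sslConnectConfig(props, userName, password);
        return new DefaultKafkaProducerFactory<>(props);
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}
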

KafkaListener configuration

package com.cheche365.dictonary.datatrans.datatrans.listener;

import com.cheche365.dictonary.datatrans.datatrans.service.OriginDataService;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.util.List;

/**
 * @author sunyan
 * @date 2022/12/19 16:58
 * @description Batch listener for the Alibaba Cloud Kafka topic
 */
@Component
@AllArgsConstructor
@Slf4j
public class KafkaConsumerListener {
    private final OriginDataService originSaveService;

    @KafkaListener(topics = "brf-sync-bedrock-1-dev", containerFactory = "batchContainerFactory")
    public void kafkaOriginData(List<String> records) {
        // The container factory enables batch listening, so each call receives the whole polled batch;
        // hand the records to originSaveService for processing
    }

}
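
The listener body is left empty above. As a rough idea of what batch processing could look like, the sketch below parses each payload with fastjson and collects the results; the class name, the commented-out saveBatch call and the error handling are assumptions, since the real OriginDataService API is not shown.

package com.cheche365.dictonary.datatrans.datatrans.listener;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.cheche365.dictonary.datatrans.datatrans.service.OriginDataService;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.List;

@Component
@AllArgsConstructor
@Slf4j
public class OriginDataBatchHandler {

    private final OriginDataService originSaveService;

    @KafkaListener(topics = "brf-sync-bedrock-1-dev", containerFactory = "batchContainerFactory")
    public void onBatch(List<String> records) {
        List<JSONObject> parsed = new ArrayList<>(records.size());
        for (String payload : records) {
            try {
                // Each message is expected to be a JSON object; malformed payloads are logged and skipped
                parsed.add(JSON.parseObject(payload));
            } catch (Exception e) {
                log.warn("skip malformed message: {}", payload, e);
            }
        }
        log.info("received {} messages, parsed {}", records.size(), parsed.size());
        // Hypothetical persistence call; replace with the actual OriginDataService method
        // originSaveService.saveBatch(parsed);
    }
}
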
