Kafka 配置文件：SASL_SSL 连接配置（BaseKafkaProperties）与消费者批量监听容器工厂配置（DefaultConfig）
package com.cheche365.dictonary.datatrans.datatrans.config;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;
import java.util.Map;
import java.util.Properties;
public abstract class BaseKafkaProperties {

    // TODO(review): hard-coded developer-machine path — should come from external configuration.
    private static final String DEFAULT_TRUSTSTORE_LOCATION =
            "D:\\WorkSpace\\brf-server\\pushData\\kafka.client.truststore.jks";

    // TODO(review): credential in source — should come from secure configuration, not code.
    private static final String DEFAULT_TRUSTSTORE_PASSWORD = "KafkaOnsClient";

    /** JAAS config template for SASL/PLAIN login; filled with username and password. */
    private static final String JAAS_TEMPLATE =
            "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"%s\" password=\"%s\";";

    /**
     * Populates {@code props} with SASL_SSL + SASL/PLAIN connection settings using the
     * default truststore location and password.
     *
     * @param props    mutable Kafka client config map to populate (modified in place)
     * @param username SASL username
     * @param password SASL password
     */
    public static void sslConnectConfig(Map<String, Object> props, String username, String password) {
        sslConnectConfig(props, username, password, DEFAULT_TRUSTSTORE_LOCATION, DEFAULT_TRUSTSTORE_PASSWORD);
    }

    /**
     * Populates {@code props} with SASL_SSL + SASL/PLAIN connection settings using an
     * explicit truststore. Backward-compatible generalization of the two-credential overload.
     *
     * @param props              mutable Kafka client config map to populate (modified in place)
     * @param username           SASL username
     * @param password           SASL password
     * @param truststoreLocation path to the client truststore JKS file
     * @param truststorePassword password of the truststore
     */
    public static void sslConnectConfig(Map<String, Object> props, String username, String password,
                                        String truststoreLocation, String truststorePassword) {
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
        props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
        props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststoreLocation);
        props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, truststorePassword);
        // Empty algorithm disables endpoint (hostname) verification — presumably required by the
        // broker's certificate setup; NOTE(review): confirm this is intentional, it weakens TLS.
        props.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
        // Use the library constant instead of the magic string "sasl.jaas.config".
        props.put(SaslConfigs.SASL_JAAS_CONFIG, String.format(JAAS_TEMPLATE, username, password));
    }
}
package com.cheche365.dictonary.datatrans.datatrans.config;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.util.StringUtils;
import javax.sql.DataSource;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
/**
 * @author sunyan
 * @date 2022/11/15 11:16
 * @description Kafka consumer configuration: builds the consumer properties (with SASL_SSL
 * connection settings) and exposes the batch listener container factory.
 */
@Configuration
@Slf4j
public class DefaultConfig {

    /** SASL username for the Kafka consumer, injected from application config. */
    @Value("${spring.kafka.consumer.username}")
    private String userName;

    /** SASL password for the Kafka consumer, injected from application config. */
    @Value("${spring.kafka.consumer.password}")
    private String password;

    /** Spring Boot Kafka properties (spring.kafka.*), used as the base consumer config. */
    @Autowired
    private KafkaProperties properties;

    /**
     * Builds the consumer configuration from Spring Boot's Kafka properties and layers
     * the SASL_SSL connection settings on top.
     *
     * @return the fully-populated consumer config map
     */
    Map<String, Object> consumerConfig() {
        Map<String, Object> consumerProperties = this.properties.buildConsumerProperties();
        // NOTE(review): the original also put ProducerConfig.ACKS_CONFIG ("acks") here, but
        // "acks" is a producer-only setting; consumers ignore it and log an unknown-config
        // warning, so it has been removed.
        BaseKafkaProperties.sslConnectConfig(consumerProperties, userName, password);
        return consumerProperties;
    }

    /**
     * Batch listener container factory, referenced by
     * {@code @KafkaListener(containerFactory = "batchContainerFactory")}.
     *
     * @return a concurrent container factory configured for batch consumption
     */
    @Bean("batchContainerFactory")
    public ConcurrentKafkaListenerContainerFactory<String, String> containerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> container =
                new ConcurrentKafkaListenerContainerFactory<>();
        container.setConsumerFactory(new DefaultKafkaConsumerFactory<>(consumerConfig()));
        // Concurrency must be less than or equal to the topic's partition count.
        container.setConcurrency(5);
        // Deliver records to the listener in batches rather than one at a time.
        container.setBatchListener(true);
        return container;
    }
}
KafkaListener 批量消费监听器示例：通过 batchContainerFactory 批量消费 brf-sync-bedrock-1-dev 主题
package com.cheche365.dictonary.datatrans.datatrans.listener;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cheche365.dictonary.datatrans.datatrans.service.OriginDataService;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import java.time.LocalDate;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
 * @author sunyan
 * @date 2022/12/19 16:58
 * @description Batch Kafka listener receiving raw origin-data payloads from the
 * brf-sync-bedrock-1-dev topic via the "batchContainerFactory".
 */
@Component
@AllArgsConstructor
@Slf4j
public class KafkaConsumerListener {

    /** Service intended to persist the raw payloads received from Kafka (not yet wired up below). */
    OriginDataService originSaveService;

    /**
     * Batch consumer for the origin-data topic.
     *
     * <p>The referenced "batchContainerFactory" is configured with
     * {@code setBatchListener(true)}, so the listener must accept a {@code List} of
     * payloads — the original single-{@code String} parameter fails message conversion
     * at runtime. The method is also made public so the framework can reliably invoke
     * it, and the empty {@code properties = {}} attribute (the default) is dropped.
     *
     * @param records one polled batch of message payloads
     */
    @KafkaListener(topics = "brf-sync-bedrock-1-dev", containerFactory = "batchContainerFactory")
    public void kafkaOriginData(List<String> records) {
        // TODO(review): the original body was empty; log receipt until processing
        // (e.g. via originSaveService) is implemented.
        log.info("received {} records from topic brf-sync-bedrock-1-dev", records.size());
    }
}