Import the dependency
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
YAML configuration
spring:
  kafka:
    bootstrap-servers: server
    consumer:
      group-id: your-consumer-group-id
      enable-auto-commit: false
      auto-offset-reset: earliest
      # Settings for connecting to a password-protected Kafka cluster; comment these out if the cluster has no password
      properties:
        sasl.mechanism: PLAIN
        security.protocol: SASL_PLAINTEXT
        sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="username" password="password";
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
    # Configure the producer and/or consumer as needed
    # producer:
    listener:
      ack-mode: manual
      concurrency: 1
# Configuration for the second Kafka cluster, under the custom spring.dpkafka prefix (read via @Value below)
spring:
  dpkafka:
    bootstrap-servers: server
    properties:
      sasl.mechanism: PLAIN
      security.protocol: SASL_PLAINTEXT
      sasl.jaas.config: org.apache.kafka.common.security.plain.PlainLoginModule required username="username" password="password";
    consumer:
      group-id: dp-master-original-dev
      auto-offset-reset: earliest
      auto-commit-interval: 100
      enable-auto-commit: false
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
    # Kafka consumer listener configuration
    listener:
      ack-mode: manual_immediate # manual commit mode
      # missing-topics-fatal: false # by default startup fails if a listened topic does not exist; set to false to avoid the error
Custom Config
The first Kafka cluster is auto-configured by Spring, so only the second one (dpkafka) needs a manually defined Config.
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.listener.ContainerProperties;

@Configuration
public class KafkaDpConfig {

    @Value("${spring.dpkafka.bootstrap-servers}")
    private String bootstrapServers;
    @Value("${spring.dpkafka.consumer.group-id}")
    private String groupId;
    @Value("${spring.dpkafka.consumer.enable-auto-commit}")
    private boolean enableAutoCommit;
    @Value("${spring.dpkafka.properties.security.protocol}")
    private String kafkaSecurityProtocol;
    @Value("${spring.dpkafka.properties.sasl.mechanism}")
    private String kafkaSASLMechanism;
    @Value("${spring.dpkafka.properties.sasl.jaas.config}")
    private String kafkaConsumerSASLJaasConfig;

    @Bean
    public KafkaTemplate<String, String> kafkaDmHubTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    // Name this factory bean explicitly; the consumer side references it in @KafkaListener(containerFactory = ...)
    @Bean
    KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> dpKafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.setConcurrency(1);
        factory.getContainerProperties().setPollTimeout(3000);
        // Set to true for batch consumption
        factory.setBatchListener(false);
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        // factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL);
        return factory;
    }

    private ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    private Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ProducerConfig.RETRIES_CONFIG, 3);
        props.put(ProducerConfig.ACKS_CONFIG, "1");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return props;
    }

    private Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 20);
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 100); // ignored while auto-commit is disabled
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 120000);
        props.put(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG, 180000);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, kafkaSecurityProtocol);
        props.put(SaslConfigs.SASL_MECHANISM, kafkaSASLMechanism);
        props.put(SaslConfigs.SASL_JAAS_CONFIG, kafkaConsumerSASLJaasConfig);
        return props;
    }
}
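Note that producerConfigs() above carries no security settings, so producing to a SASL-protected dpkafka cluster would fail to authenticate. A minimal sketch of the fix, assuming the producer can reuse the same credentials as the consumer:
// Inside KafkaDpConfig.producerConfigs(), alongside the existing settings:
props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, kafkaSecurityProtocol);
props.put(SaslConfigs.SASL_MECHANISM, kafkaSASLMechanism);
// Assumption: the producer reuses the consumer's JAAS credentials
props.put(SaslConfigs.SASL_JAAS_CONFIG, kafkaConsumerSASLJaasConfig);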
Specifying the Kafka producer channel
import javax.annotation.Resource;

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

@Service
public class KafkaServiceImpl implements KafkaService {

    /** Salesforce Kafka channel (default template auto-configured from spring.kafka) */
    @Resource
    private KafkaTemplate<String, String> kafkaTemplate;

    /** dpkafka channel (template defined in KafkaDpConfig) */
    @Resource(name = "kafkaDmHubTemplate")
    private KafkaTemplate<String, String> kafkaDmHubTemplate;

    /**
     * Create a Kafka topic
     *
     * @param kafkaTopicDTO kafkaTopic
     * @return whether the topic was created
     */
    @Override
    public Boolean createTopic(KafkaTopicDTO kafkaTopicDTO) {
        return false; // not implemented
    }

    /**
     * Send a message to the given topic (default channel)
     *
     * @param topic      topic
     * @param baseEntity message
     */
    @Override
    public void sendMessage(String topic, JSONObject baseEntity) {
        kafkaTemplate.send(topic, JSON.toJSONString(baseEntity));
    }

    /**
     * Send a message to the given topic (dpkafka channel)
     *
     * @param topic      topic
     * @param baseEntity message
     */
    @Override
    public void sendDmHubMessage(String topic, Object baseEntity) {
        kafkaDmHubTemplate.send(topic, JSON.toJSONString(baseEntity));
    }
}
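For reference, a minimal caller sketch; the surrounding class and topic names are hypothetical, only KafkaService and its methods come from the code above:
@Service
public class DemoPublisher {

    @Resource
    private KafkaService kafkaService;

    public void publish(JSONObject event) {
        // Default channel (spring.kafka cluster); "demo-topic" is a placeholder name
        kafkaService.sendMessage("demo-topic", event);
        // Second channel (dpkafka cluster, routed through kafkaDmHubTemplate)
        kafkaService.sendDmHubMessage("demo-topic", event);
    }
}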
Kafka listener consumption
Listen with the @KafkaListener annotation; containerFactory takes the name of the factory @Bean defined above.
// Batch consumption: requires factory.setBatchListener(true) in the factory
@KafkaListener(topics = "XXX", containerFactory = "dpKafkaListenerContainerFactory")
public void listenCustomer(List<String> records, Acknowledgment ack) {
    try {
        long start = System.currentTimeMillis();
        AtomicReference<Object> response = new AtomicReference<>();
        records.forEach(record -> {
            ...
        });
    } catch (Exception e) {
        log.error("Error consuming records: {}", e.getMessage());
    } finally {
        ack.acknowledge();
    }
}
// With factory.setBatchListener(false), records are consumed one at a time
@KafkaListener(topics = "XXX", containerFactory = "dpKafkaListenerContainerFactory")
public void listenCustomer(ConsumerRecord<String, String> consumerRecord, Acknowledgment ack) {
    try {
        long start = System.currentTimeMillis();
        AtomicReference<Object> response = new AtomicReference<>();
        String value = consumerRecord.value();
    } catch (Exception e) {
        log.error("Error consuming record: {}", consumerRecord.value(), e);
    } finally {
        ack.acknowledge();
    }
}
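Both examples acknowledge in the finally block, which commits the offset even when processing throws, so a failed record is never redelivered. If redelivery on failure is desired, acknowledge only on success. A minimal sketch of that variant (method name is illustrative):
@KafkaListener(topics = "XXX", containerFactory = "dpKafkaListenerContainerFactory")
public void listenCustomerWithRetry(ConsumerRecord<String, String> consumerRecord, Acknowledgment ack) {
    try {
        String value = consumerRecord.value();
        // ... process value ...
        ack.acknowledge(); // commit the offset only after successful processing
    } catch (Exception e) {
        // Offset stays uncommitted, so the record will be polled and delivered again
        log.error("Error consuming record: {}", consumerRecord.value(), e);
    }
}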