kafka的yml配置
kafka工厂配置
package com.djz.hand.config;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.listener.ContainerProperties;
import java.util.Map;
/**
 * Kafka listener container factories referenced via {@code @KafkaListener(containerFactory = ...)}.
 * <p>
 * Provides the four consumption variants: batch + auto-commit (default), batch + manual commit,
 * single-record + auto-commit, and single-record + manual commit.
 * <p>
 * Relevant properties: {@code enable-auto-commit}, {@code type}, {@code ack-mode}
 * (all listeners carry an explicit {@code containerFactory}).
 * Observed execution priority per configuration:
 * <ul>
 *   <li>Config 1 (true/false, single/batch, record): singleManual &gt; singleAuto &gt; batchManual &gt; batchAuto</li>
 *   <li>Config 2 (true/false, batch, manual | true, batch, manual_immediate): singleManual &gt; singleAuto &gt; batchManual &gt; batchAuto</li>
 *   <li>Config 3 (false, batch, manual_immediate): singleManual &gt; batchManual &gt; singleAuto &gt; batchAuto</li>
 *   <li>Config 4 (true, single, manual_immediate): singleAuto &gt; batchManual &gt; singleManual &gt; batchAuto</li>
 *   <li>Config 5 (false, single, manual_immediate): singleManual &gt; singleAuto &gt; batchManual &gt; batchAuto</li>
 * </ul>
 *
 * @author Administrator
 */
@Configuration
public class KafkaConsumeFactory {

    private final KafkaProperties kafkaProperties;

    /**
     * Constructor injection (preferred over field injection): makes the dependency
     * explicit, final, and testable.
     *
     * @param kafkaProperties Spring Boot Kafka properties used as the base consumer config
     */
    @Autowired
    public KafkaConsumeFactory(KafkaProperties kafkaProperties) {
        this.kafkaProperties = kafkaProperties;
    }

    /**
     * Batch consumption, manual offset commit (MANUAL_IMMEDIATE: commit happens as soon
     * as {@code Acknowledgment.acknowledge()} is called).
     */
    @Bean("batchManualContainerFactory")
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> batchManualContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = getContainer(true, false);
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        return factory;
    }

    /**
     * Batch consumption, auto offset commit.
     */
    @Bean("batchAutoContainerFactory")
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> batchAutoContainerFactory() {
        return getContainer(true, true);
    }

    /**
     * Single-record consumption, manual offset commit (MANUAL_IMMEDIATE).
     */
    @Bean("singleManualContainerFactory")
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> singleManualContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = getContainer(false, false);
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        return factory;
    }

    /**
     * Single-record consumption, auto offset commit.
     */
    @Bean("singleAutoContainerFactory")
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> singleAutoContainerFactory() {
        return getContainer(false, true);
    }

    /**
     * Builds a listener container factory with the shared settings.
     *
     * @param batch      true for batch listeners ({@code List<ConsumerRecord>}), false for single-record
     * @param autoCommit true to let the Kafka client auto-commit offsets
     * @return a configured factory (caller may still adjust the ack mode)
     */
    private ConcurrentKafkaListenerContainerFactory<String, String> getContainer(boolean batch, boolean autoCommit) {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory(autoCommit));
        factory.setAutoStartup(true);
        // BUGFIX: getConcurrency() returns a nullable Integer; unboxing it directly into
        // setConcurrency(int) throws NPE when spring.kafka.listener.concurrency is unset.
        Integer concurrency = kafkaProperties.getListener().getConcurrency();
        factory.setConcurrency(concurrency != null ? concurrency : 1);
        // Max time (ms) the consumer poll blocks waiting for records.
        factory.getContainerProperties().setPollTimeout(1500);
        factory.setBatchListener(batch);
        return factory;
    }

    /**
     * Creates a consumer factory from the application's Kafka properties, overriding
     * only {@code enable.auto.commit}.
     *
     * @param autoCommit value for {@link ConsumerConfig#ENABLE_AUTO_COMMIT_CONFIG}
     * @return a String/String consumer factory
     */
    public ConsumerFactory<String, String> consumerFactory(boolean autoCommit) {
        Map<String, Object> configs = kafkaProperties.buildConsumerProperties();
        configs.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, autoCommit);
        return new DefaultKafkaConsumerFactory<>(configs);
    }
}
kafka监听配置
package com.djz.hand.listen;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;
import java.util.List;
/**
 * Demo listeners: four overloads of {@code onMessage}, each bound to the same topic
 * but to a different container factory, used to observe which variant wins when all
 * carry an explicit {@code containerFactory}.
 *
 * @author dujiangzhou
 * @date 2023/8/24 18:02
 */
@Component
public class MessageListener {

    /**
     * Single-record, auto-commit variant.
     *
     * @param record the consumed record
     */
    @KafkaListener(topics = "testMessage", containerFactory = "singleAutoContainerFactory")
    public void onMessage(ConsumerRecord<String, String> record) {
        System.out.println("1号机,已完成短信发送业务的自动单条消费,id:" + record.value());
    }

    /**
     * Single-record, manual-commit variant; acknowledges after printing.
     *
     * @param record the consumed record
     * @param ack    handle used to commit the offset manually
     */
    @KafkaListener(topics = "testMessage", containerFactory = "singleManualContainerFactory")
    public void onMessage(ConsumerRecord<String, String> record, Acknowledgment ack) {
        System.out.println("1号机分身,已完成短信发送业务的手动单条消费,id:" + record.value());
        ack.acknowledge();
    }

    /**
     * Batch, manual-commit variant; acknowledges once after the whole batch.
     *
     * @param records the consumed batch
     * @param ack     handle used to commit the offsets manually
     */
    @KafkaListener(topics = "testMessage", containerFactory = "batchManualContainerFactory")
    public void onMessage(List<ConsumerRecord<String, String>> records, Acknowledgment ack) {
        System.out.println("2号机,完成短信发送业务的手动批量消费中: " + records.size());
        records.forEach(rec ->
                System.out.println("2号机,已完成短信发送业务手动的消费,id:" + rec.value()));
        ack.acknowledge();
    }

    /**
     * Batch, auto-commit variant.
     *
     * @param records the consumed batch
     */
    @KafkaListener(topics = "testMessage", containerFactory = "batchAutoContainerFactory")
    public void onMessage(List<ConsumerRecord<String, String>> records) {
        System.out.println("2号机,完成短信发送业务的自动批量消费中: " + records.size());
        records.forEach(rec ->
                System.out.println("2号机,已完成短信发送业务的自动消费,id:" + rec.value()));
    }
}
kafka发送消息类
package com.djz.hand.service.impl;
import com.djz.hand.service.MessageService;
import org.apache.kafka.common.protocol.types.Field;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
/**
 * Producer side of the demo: enqueues 100 "SMS" messages onto the {@code testMessage} topic.
 *
 * @author dujiangzhou
 * @date 2023/8/24 17:59
 */
@Service
public class MessageServiceImpl implements MessageService {

    @Resource
    private KafkaTemplate<String, String> kafkaTemplate;

    // NOTE(review): injected but never used in this class (the unused
    // org.apache.kafka.common.protocol.types.Field import is likewise dead) —
    // confirm before removing.
    @Resource
    private RedisTemplate<String, Object> redisTemplate;

    /**
     * Sends 100 messages whose ids are {@code parseInt(id) + 1 .. parseInt(id) + 100},
     * all keyed "talking" on topic {@code testMessage}.
     *
     * @param id numeric seed as a string
     * @throws NumberFormatException if {@code id} is not a parseable integer
     */
    @Override
    public void sendMessage(String id) {
        // Parse once: the value is loop-invariant (was re-parsed on every iteration).
        int base = Integer.parseInt(id);
        for (int i = 1; i <= 100; i++) {
            int data = base + i;
            System.out.println("待发送短信纳入处理队列中(kafka),id:" + data);
            kafkaTemplate.send("testMessage", "talking", String.valueOf(data));
            System.out.println("待发送短信纳入处理队列(kafka)成功,id:" + data);
        }
    }
}
结论
* kafka 监听器使用的containerFactory:
* 包含以下类型:批量消费自动提交(默认)、批量消费手动提交、单条消费自动、单条消费手动
* 参数:enable-auto-commit,type,ack-mode (监听都带注解containerFactory)
* 配置1 :true,single, record 或 false,single, record或 true,batch, record或 false,batch, record
* 执行优先级 singleManual>singleAuto>batchManual>batchAuto
* <p>
* 配置2:true,batch, manual 或false,batch, manual 或true,batch, manual_immediate
* 执行优先级 singleManual>singleAuto>batchManual>batchAuto
* <p>
* 配置3:false,batch, manual_immediate
* 执行优先级 singleManual>batchManual>singleAuto>batchAuto
* <p>
* 配置4:true,single, manual_immediate
* 执行优先级 singleAuto>batchManual>singleManual>batchAuto
* <p>
* 配置5:false,single, manual_immediate
* 执行优先级 singleManual>singleAuto>batchManual>batchAuto