KafkaConfig：Kafka 消费者配置类（consumer configuration class）
@Slf4j
@Component
@ConditionalOnProperty(value = "middle.kafka.enabled", matchIfMissing = false)
public class KafkaConfig {

    /** Kafka broker address list (required, no default). */
    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServer;

    /** Whether the consumer auto-commits offsets; defaults to false (manual ack). */
    @Value("${spring.kafka.consumer.enable-auto-commit:false}")
    private Boolean enableAutoCommit;

    /** Auto-commit interval in ms; only relevant if enable-auto-commit is true. */
    @Value("${spring.kafka.consumer.auto-commit-interval:1000}")
    private String autoCommitInterval;

    /** Offset reset policy when no committed offset exists (earliest/latest). */
    @Value("${spring.kafka.consumer.auto-offset-reset:earliest}")
    private String autoOffsetReset;

    /** Maximum number of records returned by a single poll. */
    @Value("${spring.kafka.consumer.max-poll-records:100}")
    private String maxPollRecords;

    /** Fully-qualified key deserializer class name. */
    @Value("${spring.kafka.consumer.key-deserializer:org.apache.kafka.common.serialization.StringDeserializer}")
    private String keyDeserializer;

    /** Fully-qualified value deserializer class name. */
    @Value("${spring.kafka.consumer.value-deserializer:org.apache.kafka.common.serialization.StringDeserializer}")
    private String valueDeserializer;

    /** Listener concurrency; partitions are shared across service instances. */
    @Value("${spring.kafka.listener.concurrency:1}")
    private Integer concurrency;

    /** Ack-mode name; MANUAL_IMMEDIATE means the listener commits offsets itself. */
    @Value("${spring.kafka.listener.ack-mode:MANUAL_IMMEDIATE}")
    private String ackMode;

    /** Whether a missing topic should fail container startup. */
    @Value("${middle.kafka.listener.missing-topics-fatal:false}")
    private Boolean missingTopicsFatal;

    /**
     * Batch listener container factory referenced by name from {@code @KafkaListener}
     * consumers. Delivers records in batches and uses the configured ack mode.
     */
    @Bean(name = "middleGroundKafkaListenerContainerFactory")
    KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        // Deliver records to the listener as a List<ConsumerRecord> batch.
        factory.setBatchListener(true);
        // Concurrency: partitions are split among the running service instances.
        factory.setConcurrency(this.concurrency);
        factory.setMissingTopicsFatal(missingTopicsFatal);
        ContainerProperties containerProperties = factory.getContainerProperties();
        // MANUAL_IMMEDIATE: the listener acknowledges each batch explicitly.
        containerProperties.setAckMode(ackMode(this.ackMode));
        return factory;
    }

    /**
     * Resolves the configured ack-mode name to the enum constant.
     *
     * @throws IllegalArgumentException if the name is not a valid {@link ContainerProperties.AckMode}
     */
    private ContainerProperties.AckMode ackMode(String ackMode) {
        return ContainerProperties.AckMode.valueOf(ackMode);
    }

    /** Builds the consumer factory from the shared config map (logged for diagnostics). */
    private ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> consumerConfigs = consumerConfigs();
        log.info("消费者的配置信息:{}", JSON.toJSONString(consumerConfigs));
        return new DefaultKafkaConsumerFactory<>(consumerConfigs);
    }

    /**
     * Consumer properties map, also reused by {@link #initKafkaConsumer()} and
     * {@link #initAdminClient()}. Additional tunables (session timeout, heartbeat
     * interval, max-poll-interval, group id) can be added here if needed.
     */
    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> propsMap = new HashMap<>();
        // Broker address list.
        propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.bootstrapServer);
        // Manual vs automatic offset commit.
        propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, this.enableAutoCommit);
        // Key/value deserializers.
        propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, this.keyDeserializer);
        propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, this.valueDeserializer);
        // Where to start consuming when no committed offset exists.
        propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, this.autoOffsetReset);
        // Upper bound on records returned per poll.
        propsMap.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, this.maxPollRecords);
        return propsMap;
    }

    /**
     * Standalone consumer bean (e.g. for manual assignment/seeking).
     * NOTE(review): this is org.apache.kafka's KafkaConsumer; it shares a simple name
     * with the listener component class below — confirm imports disambiguate them.
     */
    @Bean
    public KafkaConsumer<String, String> initKafkaConsumer() {
        return new KafkaConsumer<>(consumerConfigs());
    }

    /**
     * Admin client built from the consumer config map. Consumer-only keys
     * (deserializers etc.) are ignored by AdminClient with a startup warning.
     */
    @Bean
    public AdminClient initAdminClient() {
        return AdminClient.create(consumerConfigs());
    }
}
Kafka 生产者示例（producer usage）：
// Injected producer template; parameterized instead of the raw type.
KafkaTemplate<String, String> kafkaTemplate;

/**
 * Sends one message to the "topic" topic.
 * (Fix: the original referenced an undefined variable {@code msg};
 * it is now an explicit parameter.)
 *
 * @param msg the message payload to publish
 */
public void sendMsg(String msg) {
    kafkaTemplate.send("topic", msg);
}
KafkaConsumer：Kafka 消费者示例（batch listener with manual ack）
@Slf4j
@Component
@ConditionalOnProperty(value = "middle.kafka.enabled", matchIfMissing = false)
public class KafkaConsumer {
@Value("${spring.kafka.enabled}")
private boolean enabled;
@KafkaListener(topics = "${middle.kafka.consumer.topic:}", //topic
groupId = "${middle.kafka.consumer.group-id:group-1}", //分组
containerFactory = "middleGroundKafkaListenerContainerFactory")
public void pgSqlConsumer(List<ConsumerRecord<String, String>> records, Acknowledgment ack) {
if (!enabled) {
log.info("消费kafka消息开关关闭" );
}
log.info("开始消费kafka消息");
try {
for (ConsumerRecord<String, String> record : records) {
//取到消息后,批量提取
Optional<String> message = Optional.ofNullable(record.value());
if (message.isPresent()) {
String msg = message.get();
}
}
} catch (Exception e) {
log.error("msg error;{},msg={}", e, records);
} finally {
ack.acknowledge();//手动提交offset
}
}
}