使用要点:
1、kafka生产者开启事务方式,在创建 ProducerFactory 时设置 transactionIdPrefix 参数;
2、kafka生产者设置 transactionIdPrefix 参数后,会默认开启幂等;
3、kafka生产者开启幂等后,要求 acks 必须为 all,否则运行时报错;
4、kafka生产者开启幂等后,要求重试参数 retries 必须大于0,否则启动报错;
5、手动配置 KafkaTransactionManager 后,如果还要使用 DataSourceTranscationManager,需要手动配置 DataSourceTranscationManager,DataSourceTransactionManagerAutoConfiguration 自动加载的条件是 @ConditionalOnMissingBean(TransactionManager.class)
6、kafka生产者的事务由 KafkaProducer 自己管理,事务回滚由 KafkaProducer 内部的 TransactionManager 完成,Spring 不会使用 KafkaTransactionManager 进行事务回滚;
7、kafka生产者即使触发事务回滚,数据依然会发送到kafka中,并不会从kafka中删除,只是提交状态会有所不同;
8、kafka消费者如果只消费事务提交的数据,需要设置隔离级别 isolation.level 为 read_committed,否则会消费所有数据(包括回滚数据);
<dependencies>
<!-- Spring MVC / embedded Tomcat -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<!-- MySQL JDBC driver (5.x uses com.mysql.jdbc.Driver) -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.47</version>
</dependency>
<!-- Spring for Apache Kafka: KafkaTemplate, KafkaTransactionManager, @KafkaListener -->
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
<!-- JdbcTemplate + DataSourceTransactionManager auto-configuration -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<!-- Unit tests (version managed by the Spring Boot parent BOM) -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
/**
 * Kafka producer configuration.
 *
 * Declares two producer factories (one transactional, one plain) and a matching
 * KafkaTemplate for each, plus the KafkaTransactionManager that lets Spring's
 * @Transactional coordinate Kafka sends with other resources.
 *
 * @author : clown
 * @date : 2024-05-21 15:30
 **/
@Configuration
public class KafkaProducerConfig {

    /**
     * Common producer settings shared by both factories.
     *
     * @return a mutable map of producer configuration properties
     */
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        // Kafka broker address
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // How long the producer may buffer records before sending a batch
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        // Enabling transactions implicitly enables idempotence, which requires acks=all,
        // otherwise the producer fails at runtime
        props.put(ProducerConfig.ACKS_CONFIG, "all");
        // Key/value serializers
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return props;
    }

    /**
     * Transaction-enabled producer factory.
     * Setting a transactionIdPrefix is what turns transactions (and idempotence) on.
     *
     * @return producer factory that creates transactional producers
     */
    @Bean
    public ProducerFactory<String, String> producerFactoryWithTx() {
        DefaultKafkaProducerFactory<String, String> factory =
                new DefaultKafkaProducerFactory<>(producerConfigs());
        factory.setTransactionIdPrefix("my-kafka-tran-");
        return factory;
    }

    /**
     * Plain (non-transactional) producer factory.
     *
     * @return producer factory without transaction support
     */
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    /**
     * Kafka transaction manager.
     *
     * The KafkaProducer manages its own transaction internally; Spring does not use
     * this bean to roll Kafka back. It exists so Spring can coordinate Kafka sends
     * with other transactional resources (e.g. a database) and honor transaction
     * propagation behavior.
     *
     * @return transaction manager bound to the transactional producer factory
     */
    @Bean
    public KafkaTransactionManager<String, String> kafkaTransactionManager() {
        return new KafkaTransactionManager<>(producerFactoryWithTx());
    }

    /**
     * Transaction-enabled KafkaTemplate.
     * Must be used inside a method annotated with @Transactional, otherwise sending fails.
     *
     * @return transactional template
     */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplateWithTx() {
        return new KafkaTemplate<>(producerFactoryWithTx());
    }

    /**
     * Plain KafkaTemplate without transaction support.
     *
     * @return non-transactional template
     */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}
/**
 * Kafka consumer configuration.
 *
 * @author : clown
 * @date : 2024-05-22 14:26
 **/
@Configuration
public class KafkaConsumerConfig {

    /**
     * Consumer settings.
     *
     * @return a mutable map of consumer configuration properties
     */
    public Map<String, Object> getConsumerProps() {
        Map<String, Object> consumerProps = new HashMap<>();
        // BUG FIX: was "localhot:9092" (missing 's' in localhost)
        consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // Manual commit; with transactions either manual or auto commit works
        consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        // Required when producers use transactions, otherwise rolled-back (aborted)
        // records are also delivered to the consumer
        consumerProps.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
        // Key/value deserializers
        consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return consumerProps;
    }

    /**
     * Consumer factory built from {@link #getConsumerProps()}.
     *
     * @return consumer factory
     */
    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(getConsumerProps());
    }

    /**
     * Listener container factory: 5 concurrent consumers, batch mode, manual ack.
     *
     * @return listener container factory
     */
    @Bean
    public KafkaListenerContainerFactory kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> containerFactory =
                new ConcurrentKafkaListenerContainerFactory<>();
        // Number of concurrent listener threads
        containerFactory.setConcurrency(5);
        containerFactory.setConsumerFactory(consumerFactory());
        // Deliver records to the listener in batches
        containerFactory.setBatchListener(true);
        // Manual commit requires an explicit ack mode
        containerFactory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL);
        return containerFactory;
    }
}
/**
 * Database transaction configuration.
 *
 * Spring Boot's DataSourceTransactionManagerAutoConfiguration is guarded by
 * @ConditionalOnMissingBean(TransactionManager.class). Declaring a
 * KafkaTransactionManager bean therefore suppresses the auto-configured
 * DataSourceTransactionManager and silently disables database transactions,
 * so an explicit @Primary DataSourceTransactionManager must be declared here.
 *
 * @author : clown
 * @date : 2024-05-21 16:44
 **/
@Configuration
public class DataSourceConfig {

    @Bean
    @Primary
    public DataSourceTransactionManager dataSourceTransactionManager(Environment environment, DataSource dataSource,
    ObjectProvider<TransactionManagerCustomizers> transactionManagerCustomizers) {
        final DataSourceTransactionManager txManager = createTransactionManager(environment, dataSource);
        // Apply any registered customizers, mirroring what the auto-configuration would do
        transactionManagerCustomizers.ifAvailable(customizer -> customizer.customize(txManager));
        return txManager;
    }

    // Mirrors DataSourceTransactionManagerAutoConfiguration: use the
    // exception-translating JdbcTransactionManager unless translation is disabled.
    private DataSourceTransactionManager createTransactionManager(Environment environment, DataSource dataSource) {
        boolean translationEnabled = environment.getProperty(
                "spring.dao.exceptiontranslation.enabled", Boolean.class, Boolean.TRUE);
        if (translationEnabled) {
            return new JdbcTransactionManager(dataSource);
        }
        return new DataSourceTransactionManager(dataSource);
    }
}
/**
 * Kafka producer demo service.
 *
 * Each method sends a message (and optionally writes a DB row), sleeps briefly,
 * then deliberately throws to trigger transaction rollback, demonstrating which
 * resources roll back under which transaction manager.
 *
 * @author : clown
 * @date : 2024-05-21 16:08
 **/
@Service
public class KafkaProducerService {
    @Resource
    private KafkaTemplate kafkaTemplate;
    @Resource
    private KafkaTemplate kafkaTemplateWithTx;
    @Autowired
    private JdbcTemplate jdbcTemplate;

    /**
     * Uses the non-transactional kafkaTemplate: the Kafka send is NOT rolled back.
     */
    @Transactional(rollbackFor = Exception.class)
    public void sendToKafka() {
        kafkaTemplate.send("iov-test", DateUtil.format(new Date(), DatePattern.NORM_DATETIME_PATTERN));
        pause(1000);
        // Deliberate failure to trigger rollback
        throw new RuntimeException();
    }

    /**
     * Uses the transactional kafkaTemplateWithTx: the Kafka send IS rolled back (aborted).
     */
    @Transactional(rollbackFor = Exception.class)
    public void sendToKafkaWithTx() {
        kafkaTemplateWithTx.send("iov-test", DateUtil.format(new Date(), DatePattern.NORM_DATETIME_PATTERN));
        pause(1000);
        // Deliberate failure to trigger rollback
        throw new RuntimeException();
    }

    /**
     * Non-transactional kafkaTemplate: Kafka does NOT roll back.
     * Default transaction manager is the DataSourceTransactionManager: the DB row DOES roll back.
     */
    @Transactional(rollbackFor = Exception.class)
    public void sendToKafkaWithoutTxAndDbWithTx() {
        kafkaTemplate.send("iov-test", DateUtil.format(new Date(), DatePattern.NORM_DATETIME_PATTERN));
        jdbcTemplate.update("INSERT INTO test_table (user_id, name, `desc`, CRT_USER_ID, CRT_TIME, MDF_USER_ID, MDF_TIME) VALUES(100, '测试', '测试mysql事务', '1', '2017-09-12 13:58:24', '1', '2017-09-12 14:04:20')");
        pause(1000);
        // Deliberate failure to trigger rollback
        throw new RuntimeException();
    }

    /**
     * Transactional kafkaTemplateWithTx: Kafka DOES roll back.
     * Default transaction manager is the DataSourceTransactionManager: the DB row DOES roll back.
     */
    @Transactional(rollbackFor = Exception.class)
    public void sendToKafkaWithTxAndDbWithTx() {
        kafkaTemplateWithTx.send("iov-test", DateUtil.format(new Date(), DatePattern.NORM_DATETIME_PATTERN));
        jdbcTemplate.update("INSERT INTO test_table (user_id, name, `desc`, CRT_USER_ID, CRT_TIME, MDF_USER_ID, MDF_TIME) VALUES(100, '测试', '测试mysql事务', '1', '2017-09-12 13:58:24', '1', '2017-09-12 14:04:20')");
        pause(1000);
        // Deliberate failure to trigger rollback
        throw new RuntimeException();
    }

    /**
     * Transactional kafkaTemplateWithTx: Kafka DOES roll back.
     * Transaction manager is explicitly kafkaTransactionManager: the DB row does NOT roll back.
     */
    @Transactional(rollbackFor = Exception.class, value = "kafkaTransactionManager")
    public void sendToKafkaWithTxAndDbWithoutTx() {
        kafkaTemplateWithTx.send("iov-test", DateUtil.format(new Date(), DatePattern.NORM_DATETIME_PATTERN));
        jdbcTemplate.update("INSERT INTO test_table (user_id, name, `desc`, CRT_USER_ID, CRT_TIME, MDF_USER_ID, MDF_TIME) VALUES(100, '测试', '测试mysql事务', '1', '2017-09-12 13:58:24', '1', '2017-09-12 14:04:20')");
        pause(1000);
        // Deliberate failure to trigger rollback
        throw new RuntimeException();
    }

    /**
     * Sleeps for the given duration. Restores the interrupt flag before rethrowing
     * so callers (and the transaction infrastructure) can still observe the interrupt.
     *
     * @param millis sleep duration in milliseconds
     */
    private void pause(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException e) {
            // FIX: the original swallowed the interrupt status; re-interrupt the thread
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }
}
/**
 * Kafka consumer demo service.
 *
 * Batch listener on topic "iov-test"; prints each record value and then
 * acknowledges the whole batch manually (AckMode.MANUAL).
 *
 * @author : clown
 * @date : 2024-05-22 14:36
 **/
@Component
public class KafkaConsumerService {
    @KafkaListener(topics = "iov-test", containerFactory = "kafkaListenerContainerFactory", groupId = "myTest")
    public void listener(List<ConsumerRecord> records, Acknowledgment ack) {
        records.forEach(record -> System.err.println(record.value()));
        System.err.println("=====================================================");
        // Commit offsets for the whole batch
        ack.acknowledge();
    }
}
/**
 * Unit tests for the producer demo service.
 *
 * Every service method deliberately ends with {@code throw new RuntimeException()}
 * to trigger transaction rollback, so each test declares
 * {@code expected = RuntimeException.class}; without it the whole suite always fails.
 *
 * @author : clown
 * @date : 2024-05-22 16:24
 **/
@SpringBootTest(classes = KafkaProducerMain.class)
@RunWith(SpringRunner.class)
public class MyTest {
    @Autowired
    private KafkaProducerService kafkaProducerService;

    @Test(expected = RuntimeException.class)
    public void testKafka() {
        kafkaProducerService.sendToKafka();
    }

    @Test(expected = RuntimeException.class)
    public void testKafkaWithTx() {
        // FIX: removed stray double semicolon
        kafkaProducerService.sendToKafkaWithTx();
    }

    @Test(expected = RuntimeException.class)
    public void testKafkaWithoutTxAndDbWithTx() {
        kafkaProducerService.sendToKafkaWithoutTxAndDbWithTx();
    }

    @Test(expected = RuntimeException.class)
    public void testKafkaWithTxAndDbWithTx() {
        kafkaProducerService.sendToKafkaWithTxAndDbWithTx();
    }

    @Test(expected = RuntimeException.class)
    public void testKafkaWithTxAndDbWithoutTx() {
        kafkaProducerService.sendToKafkaWithTxAndDbWithoutTx();
    }
}
/**
 * Application entry point.
 *
 * @author : clown
 * @date : 2024-05-21 15:39
 **/
@SpringBootApplication
public class KafkaProducerMain {
    public static void main(String[] args) {
        // Equivalent to SpringApplication.run(KafkaProducerMain.class, args)
        new SpringApplication(KafkaProducerMain.class).run(args);
    }
}
配置文件 application.yml:
# Tomcat port (indentation restored — the flattened original is not valid YAML)
server:
  port: 8090
# Datasource configuration
spring:
  datasource:
    driverClassName: com.mysql.jdbc.Driver
    url: jdbc:mysql://localhost:3306/test?useUnicode=true&characterEncoding=UTF-8&zeroDateTimeBehavior=convertToNull&allowMultiQueries=true&useSSL=false&autoReconnect=true&failOverReadOnly=false
    username: xxxxxx
    password: xxxxxxx