Message-Driven Eventually Consistent Transactions with Kafka and MySQL (Part 2)

Implementation Example

The previous post, Message-Driven Eventually Consistent Transactions with Kafka (Part 1), introduced the theory behind BASE. In this part we walk through a concrete example and implement BASE using the algorithm shown in Figure 7.

[Figure 7]

First, the technology stack used:

JDK: 1.8
Spring: spring-boot, spring-data-jpa
Database: MySQL
Message broker: Kafka

Database tables

In the user database user, create the user table user and the applied-updates table updates_applied:

CREATE TABLE `user` (
  `id` INT(11) NOT NULL AUTO_INCREMENT,
  `name` VARCHAR(50) NOT NULL,
  `amt_sold` INT(11) NOT NULL DEFAULT '0',
  `amt_bought` INT(11) NOT NULL DEFAULT '0',
  PRIMARY KEY (`id`)
);

CREATE TABLE `updates_applied` (
  `trans_id` INT(11) NOT NULL,
  `balance` VARCHAR(50) NOT NULL,
  `user_id` INT(11) NOT NULL
);
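The post does not include the JPA entity classes. A minimal sketch of how User and UpdatesApplied could be mapped is shown below; the field names mirror the columns, and since updates_applied is created without a primary key, using trans_id as the @Id here is an assumption.

package cn.birdstudio.user.domain;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;

// Hypothetical mapping of the user table.
@Entity
@Table(name = "user")
public class User {
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private int id;
    private String name;
    private int amt_sold;
    private int amt_bought;
    // getters and setters omitted
}

// Hypothetical mapping of the updates_applied table (in its own file).
@Entity
@Table(name = "updates_applied")
public class UpdatesApplied {
    @Id // assumption: the DDL above defines no primary key
    private int trans_id;
    private String balance;
    private int user_id;
    // getters and setters omitted
}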

In the transaction database transaction, create the transaction table:

CREATE TABLE `transaction` (
  `xid` INT(11) NOT NULL AUTO_INCREMENT,
  `seller_id` INT(11) NOT NULL,
  `buyer_id` INT(11) NOT NULL,
  `amount` INT(11) NOT NULL,
  PRIMARY KEY (`xid`)
);
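A corresponding entity sketch for this table, again an assumption since the original post does not show it:

package cn.birdstudio.transaction.domain;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;

// Hypothetical mapping of the transaction table.
@Entity
@Table(name = "transaction")
public class Transaction {
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private int xid;
    private int seller_id;
    private int buyer_id;
    private int amount;
    // constructors, getters and setters omitted
}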

Configure the two data sources. First the user data source, UserDataSourceConfiguration:

package cn.birdstudio.user.domain;

import javax.sql.DataSource;

import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.transaction.PlatformTransactionManager;

@Configuration
@EnableJpaRepositories(basePackageClasses = User.class, entityManagerFactoryRef = "userEntityManagerFactory", transactionManagerRef = "userTransactionManager")
class UserDataSourceConfiguration {

    @Bean
    @ConfigurationProperties("app.datasource.user")
    DataSourceProperties userDataSourceProperties() {
        return new DataSourceProperties();
    }

    @Bean
    @ConfigurationProperties("app.datasource.user")
    DataSource userDataSource() {
        return userDataSourceProperties().initializeDataSourceBuilder().build();
    }

    @Bean
    LocalContainerEntityManagerFactoryBean userEntityManagerFactory() {
        HibernateJpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter();
        vendorAdapter.setGenerateDdl(false);
        LocalContainerEntityManagerFactoryBean factory = new LocalContainerEntityManagerFactoryBean();
        factory.setJpaVendorAdapter(vendorAdapter);
        factory.setPackagesToScan(User.class.getPackage().getName());
        factory.setDataSource(userDataSource());
        factory.setPersistenceUnitName("user");
        return factory;
    }

    @Bean
    PlatformTransactionManager userTransactionManager() {
        JpaTransactionManager txManager = new JpaTransactionManager();
        txManager.setEntityManagerFactory(userEntityManagerFactory().getObject());
        return txManager;
    }
}

Then the transaction data source, TransactionDataSourceConfiguration:

package cn.birdstudio.transaction.domain;

import javax.sql.DataSource;

import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.transaction.PlatformTransactionManager;

@Configuration
@EnableJpaRepositories(basePackageClasses = Transaction.class, entityManagerFactoryRef = "transactionEntityManagerFactory", transactionManagerRef = "transactionManager")
class TransactionDataSourceConfiguration {

    @Bean
    @ConfigurationProperties("app.datasource.transaction")
    DataSourceProperties transactionDataSourceProperties() {
        return new DataSourceProperties();
    }

    @Bean
    @ConfigurationProperties("app.datasource.transaction")
    DataSource transactionDataSource() {
        return transactionDataSourceProperties().initializeDataSourceBuilder().build();
    }

    @Bean
    LocalContainerEntityManagerFactoryBean transactionEntityManagerFactory() {
        HibernateJpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter();
        vendorAdapter.setGenerateDdl(false);
        LocalContainerEntityManagerFactoryBean factory = new LocalContainerEntityManagerFactoryBean();
        factory.setJpaVendorAdapter(vendorAdapter);
        factory.setPackagesToScan(Transaction.class.getPackage().getName());
        factory.setDataSource(transactionDataSource());
        factory.setPersistenceUnitName("transaction");
        return factory;
    }

    @Bean
    PlatformTransactionManager transactionManager() {
        JpaTransactionManager txManager = new JpaTransactionManager();
        txManager.setEntityManagerFactory(transactionEntityManagerFactory().getObject());
        return txManager;
    }
}
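The two @ConfigurationProperties prefixes above need matching entries in application.properties. A minimal sketch with placeholder hosts and credentials (adjust to your environment):

# Hypothetical connection settings for the two data sources.
app.datasource.user.url=jdbc:mysql://localhost:3306/user
app.datasource.user.username=root
app.datasource.user.password=secret
app.datasource.user.driver-class-name=com.mysql.jdbc.Driver

app.datasource.transaction.url=jdbc:mysql://localhost:3306/transaction
app.datasource.transaction.username=root
app.datasource.transaction.password=secret
app.datasource.transaction.driver-class-name=com.mysql.jdbc.Driver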

Configure the Kafka messaging

The producer configuration class KafkaProducerConfig.java. For the KafkaTransactionManager to work, the producer factory must be transactional, so producerFactory.setTransactionIdPrefix("trans") must be set:

@Configuration
public class KafkaProducerConfig {

    @Bean
    public ProducerFactory<String, Object> producerFactory() {
        DefaultKafkaProducerFactory<String, Object> producerFactory = new DefaultKafkaProducerFactory<>(
                producerConfigs());
        producerFactory.setTransactionIdPrefix("trans");
        return producerFactory;
    }

    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "172.16.1.168:9092");
        props.put(ProducerConfig.RETRIES_CONFIG, 2);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        return props;
    }

    @Bean
    public KafkaTemplate<String, Object> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}
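The producer side, which writes the transaction row and publishes the corresponding messages, is not shown in this post. A minimal sketch of what it could look like is given below; the service and repository names, the trade() method, and the TransactionMessage constructor are all assumptions. Because the producer factory above is transactional, KafkaTemplate synchronizes its Kafka transaction with the active JPA transaction, so the messages are only committed to the topic if the database insert commits.

// Hypothetical producer-side service; names and signatures are assumptions.
@Component("transactionService")
public class TransactionServiceImpl {

    private final TransactionRepository transactionRepository;
    private final KafkaTemplate<String, Object> kafkaTemplate;

    public TransactionServiceImpl(TransactionRepository transactionRepository,
            KafkaTemplate<String, Object> kafkaTemplate) {
        this.transactionRepository = transactionRepository;
        this.kafkaTemplate = kafkaTemplate;
    }

    @Transactional("transactionManager")
    public void trade(int sellerId, int buyerId, int amount) {
        // 1. Insert the trade into the transaction database.
        Transaction t = transactionRepository.save(new Transaction(sellerId, buyerId, amount));
        // 2. Publish one message per side of the trade; the sends join a Kafka transaction
        //    that is synchronized with the JPA transaction started above.
        kafkaTemplate.send("transaction", new TransactionMessage(t.getXid(), sellerId, amount, TransactionMessage.Type.SELLER));
        kafkaTemplate.send("transaction", new TransactionMessage(t.getXid(), buyerId, amount, TransactionMessage.Type.BUYER));
    }
}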

The consumer configuration class KafkaConsumerConfig.java configures a KafkaTransactionManager on the listener container:

@Configuration
@EnableKafka
public class KafkaConsumerConfig {

    @Bean
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, TransactionMessage>> kafkaListenerContainerFactory(
            ProducerFactory<String, Object> producerFactory) {
        ConcurrentKafkaListenerContainerFactory<String, TransactionMessage> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        //factory.setMessageConverter(new StringJsonMessageConverter());
        //factory.setConcurrency(3);
        factory.getContainerProperties().setPollTimeout(3000);
        factory.getContainerProperties().setTransactionManager(new KafkaTransactionManager<>(producerFactory));
        return factory;
    }

    @Bean
    public ConsumerFactory<String, TransactionMessage> consumerFactory() {
        JsonDeserializer<TransactionMessage> jd = new JsonDeserializer<>(TransactionMessage.class);
        return new DefaultKafkaConsumerFactory<>(consumerConfigs(), new StringDeserializer(), jd);
    }

    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> propsMap = new HashMap<>();
        propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "172.16.1.168:9092");
        propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");
        propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
        propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
        propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, "group1");
        propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        return propsMap;
    }
}
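The TransactionMessage payload used by the consumer factory and the listener below is not included in the post. Based on the getters the listener uses, a plausible sketch looks like this (the field names, the nested Type enum, and the constructor are assumptions):

// Hypothetical message payload serialized with JsonSerializer / JsonDeserializer.
public class TransactionMessage {

    public enum Type { SELLER, BUYER }

    private int xid;     // id of the row in the transaction table
    private int id;      // id of the user to update
    private int amount;  // amount sold or bought
    private Type type;   // which side of the trade this message applies to

    public TransactionMessage() {
        // no-arg constructor required by the JSON deserializer
    }

    public TransactionMessage(int xid, int id, int amount, Type type) {
        this.xid = xid;
        this.id = id;
        this.amount = amount;
        this.type = type;
    }

    public int getXid() { return xid; }
    public void setXid(int xid) { this.xid = xid; }
    public int getId() { return id; }
    public void setId(int id) { this.id = id; }
    public int getAmount() { return amount; }
    public void setAmount(int amount) { this.amount = amount; }
    public Type getType() { return type; }
    public void setType(Type type) { this.type = type; }
}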

UserServiceImpl implements the Kafka message listener. @KafkaListener(groupId = "group1", topics = "transaction") marks the listener method, and @Transactional("userTransactionManager") wraps it in a database transaction. When the listener is invoked, the KafkaTransactionManager transaction starts first, then the JpaTransactionManager transaction. If the JPA transaction commits, producer.sendOffsetsToTransaction() is called and the Kafka transaction is committed. If the JPA transaction throws, producer.sendOffsetsToTransaction() is not called; the same happens if the Kafka transaction fails after the JPA transaction has already committed. producer.sendOffsetsToTransaction() effectively plays the role of removing the message from the queue. The check int processed = updatesAppliedRepository.find(trans_id, id, type.toString()) determines whether the User has already been updated for this transaction, so a redelivered message is not applied twice.

@Component("userService")
public class UserServiceImpl implements UserService {

    private static final Logger logger = LoggerFactory.getLogger(UserServiceImpl.class);

    private final UserRepository userRepository;

    @Resource
    private UpdatesAppliedRepository updatesAppliedRepository;

    public UserServiceImpl(UserRepository userRepository) {
        this.userRepository = userRepository;
    }

    private void sold(TransactionMessage msg) {
        Type type = msg.getType();
        int id = msg.getId();
        int amount = msg.getAmount();
        int trans_id = msg.getXid();
        int processed = updatesAppliedRepository.find(trans_id, id, type.toString());
        if (processed == 0) {
            switch (type) {
            case SELLER:
                userRepository.updateAmtSold(id, amount);
                break;
            case BUYER:
                userRepository.updateAmtBought(id, amount);
                break;
            }
            //throwException();
            UpdatesApplied updatesApplied = new UpdatesApplied();
            updatesApplied.setTrans_id(trans_id);
            updatesApplied.setUser_id(id);
            updatesApplied.setBalance(type.toString());
            updatesAppliedRepository.save(updatesApplied);
        }
    }

    @Override
    @Transactional("userTransactionManager")
    @KafkaListener(groupId = "group1", topics = "transaction")
    //@KafkaListener(groupId = "group1", topicPartitions = @TopicPartition(topic = "", partitionOffsets = @PartitionOffset(partition = "0", initialOffset = "5")))
    public void receivekafka(TransactionMessage msg) {
        logger.info("receive kafka message {}", msg);
        sold(msg);
    }

    private void throwException() {
        throw new RuntimeException("throw exception in test");
    }
}
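The two repositories used by UserServiceImpl are also not shown. A plausible Spring Data JPA sketch (each interface in its own file, query details assumed) might be:

// Hypothetical repository for the user table.
public interface UserRepository extends JpaRepository<User, Integer> {

    @Modifying
    @Query("update User u set u.amt_sold = u.amt_sold + :amount where u.id = :id")
    void updateAmtSold(@Param("id") int id, @Param("amount") int amount);

    @Modifying
    @Query("update User u set u.amt_bought = u.amt_bought + :amount where u.id = :id")
    void updateAmtBought(@Param("id") int id, @Param("amount") int amount);
}

// Hypothetical repository for updates_applied; find() counts previously applied updates
// so the listener can skip redelivered messages.
public interface UpdatesAppliedRepository extends JpaRepository<UpdatesApplied, Integer> {

    @Query("select count(u) from UpdatesApplied u where u.trans_id = :transId and u.user_id = :userId and u.balance = :balance")
    int find(@Param("transId") int transId, @Param("userId") int userId, @Param("balance") String balance);
}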

References

1. Dan Pritchett, "BASE: An Acid Alternative", ACM Queue. http://queue.acm.org/detail.cfm?id=1394128
