多数据源情况时,spring自带事务注解@Transactional会失效。不想使用分布式事务,可以实现Connection接口,重写commit()和close()方法,实现多数据源事务控制。
1、注解类
/**
 * Marks a method whose database operations, possibly spanning several data
 * sources, should be committed or rolled back together by {@code DBTransactionAop}.
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface DBTransaction {
}
2、切面类
@Aspect
@Configuration
@Log4j
public class DBTransactionAop {
@Pointcut("@annotation(cn.ggdo.annotation.DBTransaction)")
public void transactPoint() {}
@Around("transactPoint()")
public Object multiTranAop(ProceedingJoinPoint joinPoint) throws Throwable {
// 开启事务
log.info("DBTransaction开启事务");
TransactionContext.openTran();
try {
// 执行业务
Object proceed = joinPoint.proceed();
// 提交事务
log.info("DBTransaction提交事务");
for (DBConnection connection : DBDataSource.MULTI_TRAN_CONNECTION.get()) {
connection.commitMultiDbTran();
connection.closeMultiDbTran();
}
return proceed;
} catch (Throwable t) {
log.info("DBTransaction回滚事务");
for (DBConnection connection : DBDataSource.MULTI_TRAN_CONNECTION.get()) {
// 事务回滚
connection.rollback();
connection.closeMultiDbTran();
}
throw t;
} finally {
// 清空 事务 连接,关闭当前事务
DBDataSource.MULTI_TRAN_CONNECTION.get().clear();
TransactionContext.closeTran();
}
}
}
3、数据源加载类
@Configuration
public class DBDataSourceConfig {

    @Autowired
    private SourceConfig sourceConfig;

    @Autowired
    private YamlSourceConfig yamlSourceConfig;

    /**
     * Registers the custom routing data source (DBDataSource) as the "master"
     * bean, with its default target built from the master database name
     * declared in the YAML configuration.
     */
    @Bean(name = "master")
    public DataSource buildMainDataSource() {
        final DBDataSource routingDataSource = new DBDataSource();
        final String masterDb = yamlSourceConfig.getMasterName();
        routingDataSource.setDefaultTargetDataSource(buildDataSource(masterDb));
        return routingDataSource;
    }

    /**
     * Builds a Druid data source whose JDBC URL targets the database named by
     * {@code taskId} (substituted into the {dbName} placeholder of the
     * configured URL template).
     */
    public DruidDataSource buildDataSource(String taskId) {
        final DruidDataSource ds = new DruidDataSource();
        ds.setDriverClassName(sourceConfig.getDriverClassName());
        ds.setUrl(sourceConfig.getUrl().replace("{dbName}", taskId));
        ds.setUsername(sourceConfig.getUsername());
        ds.setPassword(sourceConfig.getPassword());
        return ds;
    }
}
4、事务的开关类,通过ThreadLocal存储当前线程的事务开关状态;数据源在发放连接时据此判断是否把连接缓存起来统一处理
/**
 * Per-thread switch that tells DBDataSource whether a multi-datasource
 * transaction is currently active, so it knows when to cache handed-out
 * connections for group commit/rollback by DBTransactionAop.
 */
public class TransactionContext {

    // withInitial fixes the original defect: a static block calling set(false)
    // only initialised the value for the single thread that loaded this class;
    // every other thread started with null (worked around by a null check).
    private static final ThreadLocal<Boolean> TRAN_SWITCH_CONTEXT =
            ThreadLocal.withInitial(() -> Boolean.FALSE);

    /** Turns the multi-datasource transaction on for the current thread. */
    public static void openTran() {
        TRAN_SWITCH_CONTEXT.set(Boolean.TRUE);
    }

    /**
     * Turns the transaction off for the current thread. Uses remove() rather
     * than set(false): the next get() re-applies the initial value, and the
     * entry is dropped so pooled threads do not retain stale ThreadLocal state.
     */
    public static void closeTran() {
        TRAN_SWITCH_CONTEXT.remove();
    }

    /** @return true when the current thread has an open multi-datasource transaction */
    public static Boolean isOpenTran() {
        return TRAN_SWITCH_CONTEXT.get();
    }
}
5、数据源切换类,存储当前数据源名称dbname,每次切换数据源更新。
/**
 * Per-thread holder for the name of the database the current thread should
 * use; DBDataSource reads it on every getConnection() to pick the target
 * data source. Defaults to the MASTER data source.
 */
@Log4j
public final class DatabaseContext {

    // withInitial fixes the original defect: a static block calling set(...)
    // only initialised the value for the thread that loaded this class; other
    // threads started with null (worked around by a null check in getCurDb).
    private static final ThreadLocal<String> CUR_DB =
            ThreadLocal.withInitial(() -> DataSourceTypeEnum.MASTER.name());

    /**
     * Switches the current thread to the given data source.
     *
     * NOTE(review): on pooled threads this value persists across requests —
     * callers should switch back (or add a clear()/remove() hook) when done.
     *
     * @param dbName name of the target database
     */
    public static void switchDb(String dbName) {
        log.info("切换到{"+dbName+"}数据源:");
        CUR_DB.set(dbName);
    }

    /**
     * @return the database name for the current thread (MASTER by default)
     */
    public static String getCurDb() {
        return CUR_DB.get();
    }
}
6、动态数据源切换实现类,继承AbstractDataSource,重写getConnection()和getConnection(String username, String password),实现多数据源事务的处理。
/**
* 动态数据源
*/
@Log4j
public class DBDataSource extends AbstractDataSource implements InitializingBean {
/** 主数据源 */
private DruidDataSource defaultTargetDataSource;
@Autowired
private DBDataSourceConfig dataSourceConfig;
@Autowired
private YamlSourceConfig yamlSourceConfig;
/**
* 其他的动态数据源,统一起来方便管理
*/
private static final Map<String, DruidDataSource> DATA_SOURCE_MAP = new ConcurrentHashMap<>();
/**
* 多数据源 执行 事务期间用到的连接
*/
public static final ThreadLocal<List<DBConnection>> MULTI_TRAN_CONNECTION = new ThreadLocal<>();
static {
MULTI_TRAN_CONNECTION.set(new ArrayList<>());
}
private void addDBChangeConnection(DBConnection connection){
if (MULTI_TRAN_CONNECTION.get() == null){
MULTI_TRAN_CONNECTION.set(new ArrayList<>());
}
MULTI_TRAN_CONNECTION.get().add(connection);
}
/**
* 如果开启事务,就将连接缓存到 MULTI_TRAN_CONNECTION 中
*/
@Override
public Connection getConnection() throws SQLException {
DBConnection customConnection = new DBConnection(getDataSource().getConnection());
if (TransactionContext.isOpenTran()) {
customConnection.setAutoCommit(false);
addDBChangeConnection(customConnection);
}
return customConnection;
}
@Override
public Connection getConnection(String username, String password) throws SQLException {
DBConnection customConnection = new DBConnection(getDataSource().getConnection(username, password));
if (TransactionContext.isOpenTran()) {
customConnection.setAutoCommit(false);
addDBChangeConnection(customConnection);
}
return customConnection;
}
/**
* 获取 dataSource 的时候,可以使用 LRU 算法,对 DataSource 进行热点排序,便于清理
* @return
*/
protected DruidDataSource getDataSource() {
DruidDataSource dataSource;
String key = DatabaseContext.getCurDb();
if ((dataSource = DATA_SOURCE_MAP.get(key)) == null) {
synchronized (this) {
if (DATA_SOURCE_MAP.get(key) == null) {
// 创建新的数据源
dataSource = dataSourceConfig.buildDataSource(key);
DATA_SOURCE_MAP.put(key, dataSource);
}
}
}
return dataSource;
}
@Override
@SuppressWarnings("unchecked")
public <T> T unwrap(Class<T> iface) throws SQLException {
if (iface.isInstance(this)) {
return (T) this;
}
return getDataSource().unwrap(iface);
}
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
return (iface.isInstance(this) || getDataSource().isWrapperFor(iface));
}
public void setDefaultTargetDataSource(DruidDataSource defaultTargetDataSource) {
this.defaultTargetDataSource = defaultTargetDataSource;
}
@Override
public void afterPropertiesSet() throws Exception {
DATA_SOURCE_MAP.put(DataSourceTypeEnum.MASTER.name(), defaultTargetDataSource);
}
public Boolean createDataSourceWithCheck(EdcProjectEnvironmentInfoDto edcProjectEnvironmentInfoDto) throws Exception {
EdcProjectEnvironmentInfoDto dataSource = edcProjectEnvironmentInfoDto;
String dbName = (yamlSourceConfig.getDatabaseName().replace("{environment}",dataSource.getEnvironmentType())).replace("{projectId}",dataSource.getProjectId().toString());
log.info("正在检查数据源:" + dbName);
Map<String, DruidDataSource> dynamicTargetDataSources2 = this.DATA_SOURCE_MAP;
if (dynamicTargetDataSources2.containsKey(dbName)) {
log.info("数据源" + dbName + "之前已经创建,准备测试数据源是否正常...");
DruidDataSource druidDataSource = dynamicTargetDataSources2.get(dbName);
boolean rightFlag = true;
Connection connection = null;
try {
log.info(dbName + "数据源的概况->当前闲置连接数:" + druidDataSource.getPoolingCount());
long activeCount = druidDataSource.getActiveCount();
log.info(dbName + "数据源的概况->当前活动连接数:" + activeCount);
if (activeCount > 0) {
log.info(dbName + "数据源的概况->活跃连接堆栈信息:" + druidDataSource.getActiveConnectionStackTrace());
}
log.info("准备获取数据库连接...");
connection = druidDataSource.getConnection();
log.info("数据源" + dbName + "正常");
} catch (Exception e) {
log.error(e.getMessage(), e); //把异常信息打印到日志文件
rightFlag = false;
log.info("缓存数据源" + dbName + "已失效,准备删除...");
if (delDatasources(dbName)) {
log.info("缓存数据源删除成功");
} else {
log.info("缓存数据源删除失败");
}
} finally {
if (null != connection) {
connection.close();
}
}
if (rightFlag) {
log.info("不需要重新创建数据源");
} else {
log.info("准备重新创建数据源...");
createDataSource(dataSource);
log.info("重新创建数据源完成");
}
return true;
} else {
return createDataSource(dataSource);
}
}
// 删除数据源
public boolean delDatasources(String datasourceid) {
Map<String, DruidDataSource> dynamicTargetDataSources2 = this.DATA_SOURCE_MAP;
if (dynamicTargetDataSources2.containsKey(datasourceid)) {
Set<DruidDataSource> druidDataSourceInstances = DruidDataSourceStatManager.getDruidDataSourceInstances();
for (DruidDataSource l : druidDataSourceInstances) {
if (datasourceid.equals(l.getName())) {
dynamicTargetDataSources2.remove(datasourceid);
DruidDataSourceStatManager.removeDataSource(l);
return true;
}
}
return false;
} else {
return false;
}
}
/**
* 创建数据源
* @param dataSource
* @return
* @throws Exception
*/
private Boolean createDataSource(EdcProjectEnvironmentInfoDto dataSource) throws Exception {
// String datasourceId = dataSource.getDatasourceId();
String dbName = yamlSourceConfig.getDatabaseName().replace("{environment}",dataSource.getEnvironmentType()).replace("{projectId}",dataSource.getProjectId().toString());
log.info("准备创建数据源" + dbName);
String databasetype = dataSource.getDatabasetype();
String username = dataSource.getUserName();
String password = dataSource.getPassWord();
String url = dataSource.getUrl();
String driveClass = dataSource.getDriverClassName();
if (testDatasource(dbName, driveClass, url, username, password)) {
boolean result = this.createDataSource(dbName, driveClass, url, username, password, databasetype);
if (!result) {
log.error("数据源" + dbName + "配置正确,但是创建失败");
return false;
}
} else {
log.error("数据源配置有错误");
return false;
}
return true;
}
// 创建数据源
public boolean createDataSource(String key, String driveClass, String url, String username, String password, String databasetype) {
try {
try { // 排除连接不上的错误
Class.forName(driveClass);
DriverManager.getConnection(url, username, password);// 相当于连接数据库
} catch (Exception e) {
return false;
}
DruidDataSource druidDataSource = dataSourceConfig.buildDataSource(key);
// druidDataSource.setName(key);
// druidDataSource.setDriverClassName(driveClass);
// druidDataSource.setUrl(url);
// druidDataSource.setUsername(username);
// druidDataSource.setPassword(password);
druidDataSource.setInitialSize(1); //初始化时建立物理连接的个数。初始化发生在显示调用init方法,或者第一次getConnection时
druidDataSource.setMaxActive(20); //最大连接池数量
druidDataSource.setMaxWait(60000); //获取连接时最大等待时间,单位毫秒。当链接数已经达到了最大链接数的时候,应用如果还要获取链接就会出现等待的现象,等待链接释放并回到链接池,如果等待的时间过长就应该踢掉这个等待,不然应用很可能出现雪崩现象
druidDataSource.setMinIdle(5); //最小连接池数量
druidDataSource.setTestOnBorrow(true); //申请连接时执行validationQuery检测连接是否有效,这里建议配置为TRUE,防止取到的连接不可用
druidDataSource.setTestWhileIdle(true);//建议配置为true,不影响性能,并且保证安全性。申请连接的时候检测,如果空闲时间大于timeBetweenEvictionRunsMillis,执行validationQuery检测连接是否有效。
// druidDataSource.setValidationQuery(validationQuery); //用来检测连接是否有效的sql,要求是一个查询语句。如果validationQuery为null,testOnBorrow、testOnReturn、testWhileIdle都不会起作用。
druidDataSource.setFilters("stat");//属性类型是字符串,通过别名的方式配置扩展插件,常用的插件有:监控统计用的filter:stat日志用的filter:log4j防御sql注入的filter:wall
druidDataSource.setTimeBetweenEvictionRunsMillis(60000); //配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
druidDataSource.setMinEvictableIdleTimeMillis(180000); //配置一个连接在池中最小生存的时间,单位是毫秒,这里配置为3分钟180000
druidDataSource.setKeepAlive(true); //打开druid.keepAlive之后,当连接池空闲时,池中的minIdle数量以内的连接,空闲时间超过minEvictableIdleTimeMillis,则会执行keepAlive操作,即执行druid.validationQuery指定的查询SQL,一般为select * from dual,只要minEvictableIdleTimeMillis设置的小于防火墙切断连接时间,就可以保证当连接空闲时自动做保活检测,不会被防火墙切断
druidDataSource.setRemoveAbandoned(true); //是否移除泄露的连接/超过时间限制是否回收。
druidDataSource.setRemoveAbandonedTimeout(3600); //泄露连接的定义时间(要超过最大事务的处理时间);单位为秒。这里配置为1小时
druidDataSource.setLogAbandoned(true); //移除泄露连接发生是是否记录日志
druidDataSource.init();
this.DATA_SOURCE_MAP.put(key, druidDataSource);
log.info(key + "数据源初始化成功");
// log.info(key+"数据源的概况:"+druidDataSource.dump());
return true;
} catch (Exception e) {
log.error(e + "");
return false;
}
}
// 测试数据源连接是否有效
private boolean testDatasource(String key, String driveClass, String url, String username, String password) {
try {
Class.forName(driveClass);
DriverManager.getConnection(url, username, password);
return true;
} catch (Exception e) {
return false;
}
}
}
7、实现Connection接口,重写commit()和close()方法,区分为事务开启和未开启两种状态
/**
 * Connection wrapper that suppresses commit() and close() while a
 * multi-datasource transaction is open on the current thread (per
 * TransactionContext.isOpenTran()), so the ORM cannot commit or release the
 * connection prematurely. DBTransactionAop performs the real
 * commit/rollback/close through the *MultiDbTran methods.
 *
 * NOTE(review): only part of the class is visible in this snippet; the
 * remaining Connection methods presumably delegate to the wrapped connection
 * like createStatement() does — confirm in the full source.
 */
public class DBConnection implements Connection {
// The real, underlying JDBC connection being wrapped.
private Connection connection;
public DBConnection(Connection connection) {
this.connection = connection;
}
@Override
public void commit() throws SQLException {
// If no multi-datasource transaction is open, commit immediately; otherwise
// the commit is deferred to commitMultiDbTran(), driven by the aspect.
if (!TransactionContext.isOpenTran()) {
connection.commit();
}
}
// Real commit, invoked by the aspect when the group transaction succeeds.
public void commitMultiDbTran() throws SQLException {
connection.commit();
}
@Override
public void close() throws SQLException {
// MyBatis calls close() after executing a statement; if the connection were
// closed mid-transaction, later statements in the same transaction would
// fail — so close only when no multi-datasource transaction is open.
if (!TransactionContext.isOpenTran()) {
connection.close();
}
}
// Real close, invoked by the aspect once the group transaction ends.
public void closeMultiDbTran() throws SQLException {
connection.close();
}
@Override
public Statement createStatement() throws SQLException {
return connection.createStatement();
}
/**
 * The remaining Connection methods (elided here) are plain @Override delegates.
 */
}