直接贴代码吧
1. 配置数据源 application.yml文件
# Data-source settings bound by DbFirstConfig via
# @ConfigurationProperties(prefix = "spring.dbfirst").
# NOTE(review): the nesting below is required — with every key at column 0 the
# snippet is not valid for the "spring.dbfirst" prefix.
spring:
  dbfirst:
    # master database (read/write)
    url: jdbc:mysql://192.168.0.79:3306/ysn?useUnicode=true&autoReconnect=true&rewriteBatchedStatements=true&socketTimeout=30000&connectTimeout=3000
    # slave database (read-only)
    url2: jdbc:mysql://192.168.0.210:3306/ysn?useUnicode=true&autoReconnect=true&rewriteBatchedStatements=true&socketTimeout=30000&connectTimeout=3000
    username: ***
    password: ***
    driver: com.mysql.jdbc.Driver
    type: com.alibaba.druid.pool.DruidDataSource
    # Druid pool sizing
    initialSize: 5
    minIdle: 5
    maxActive: 20
    # max time (ms) to wait for a free connection
    maxWait: 60000
    # idle-connection eviction
    timeBetweenEvictionRunsMillis: 60000
    minEvictableIdleTimeMillis: 300000
    # connection validation
    validationQuery: SELECT 1 FROM DUAL
    testWhileIdle: true
    testOnBorrow: true
    testOnReturn: false
    # PreparedStatement cache
    poolPreparedStatements: true
    maxOpenPreparedStatementConnectionSize: 20
    # Druid filters: stat (metrics) + wall (SQL firewall)
    filters: stat,wall
    connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
2. mybatis配置
package com.hlz.dao.config;
import com.alibaba.druid.filter.Filter;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.wall.WallConfig;
import com.alibaba.druid.wall.WallFilter;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.sql.DataSource;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.apache.ibatis.plugin.Interceptor;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.jdbc.datasource.LazyConnectionDataSourceProxy;
/**
 * Created with apollo
 *
 * @author: yanbo
 * @date: Created in 2018-03-26 11:50
 * @description: "dbfirst" data-source configuration. Builds a Druid master and
 * slave pool, a routing {@link DataSourceSelector} over both, and the MyBatis
 * SqlSessionFactory / transaction manager wired to the routed data source.
 */
@Data
@Slf4j
@Configuration
@ConfigurationProperties(prefix = "spring.dbfirst")
@MapperScan(basePackages = "com.hlz.dao", sqlSessionFactoryRef = "dbFirstPrimarySqlSessionFactory")
public class DbFirstConfig {

    /** JDBC url of the master (read/write) database. */
    private String url;
    /** JDBC url of the slave (read-only) database. */
    private String url2;
    private String username;
    private String password;
    private String driver;
    private int maxActive;
    /** Timeout (ms) when waiting for a free connection. */
    private int maxWait;
    private int initialSize;
    private int maxOpenPreparedStatementConnectionSize;
    private int minEvictableIdleTimeMillis;
    /** Interval (ms) between idle-eviction runs — present in the yml but previously never bound. */
    private int timeBetweenEvictionRunsMillis;
    private int minIdle;
    private String validationQuery;
    private boolean testWhileIdle;
    private boolean testOnBorrow;
    private boolean testOnReturn;
    private boolean poolPreparedStatements;
    private String connectionProperties;
    private String filters;

    /**
     * Master (read/write) data source.
     *
     * @return Druid pool pointing at {@link #url}
     * @throws SQLException if the filter string cannot be parsed
     */
    @Bean
    public DataSource master() throws SQLException {
        return getDruidDataSource(url);
    }

    /**
     * Slave (read-only) data source.
     *
     * @return Druid pool pointing at {@link #url2}
     * @throws SQLException if the filter string cannot be parsed
     */
    @Bean
    public DataSource slave() throws SQLException {
        return getDruidDataSource(url2);
    }

    /** Builds one Druid pool from the shared spring.dbfirst settings. */
    private DruidDataSource getDruidDataSource(String url) throws SQLException {
        log.info("----------init cloud dataSource !!! ");
        // Do not log the password here — url and driver are enough for diagnostics.
        log.info("driver --->: " + driver + ", url --->: " + url);
        DruidDataSource druidDataSource = new DruidDataSource();
        druidDataSource.setDriverClassName(driver);
        druidDataSource.setUrl(url);
        druidDataSource.setUsername(username);
        druidDataSource.setPassword(password);
        druidDataSource.setMaxActive(maxActive);
        druidDataSource.setInitialSize(initialSize);
        druidDataSource.setMinIdle(minIdle);
        druidDataSource.setMaxWait(maxWait);
        druidDataSource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        // Was missing: without this the yml's timeBetweenEvictionRunsMillis had no effect.
        druidDataSource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        druidDataSource.setValidationQuery(validationQuery);
        druidDataSource.setTestWhileIdle(testWhileIdle);
        druidDataSource.setTestOnBorrow(testOnBorrow);
        druidDataSource.setTestOnReturn(testOnReturn);
        druidDataSource.setPoolPreparedStatements(poolPreparedStatements);
        druidDataSource.setMaxOpenPreparedStatements(maxOpenPreparedStatementConnectionSize);
        druidDataSource.setConnectionProperties(connectionProperties);
        druidDataSource.setFilters(filters);
        // Renamed from "filters": the original local variable shadowed the String field
        // used on the line above, which was confusing and error-prone.
        // NOTE(review): "filters: stat,wall" also creates a wall filter with default
        // config; confirm the proxy WallFilter below is the one that takes effect,
        // otherwise multi-statement SQL may still be rejected.
        List<Filter> proxyFilters = new ArrayList<>();
        proxyFilters.add(createWallFilter());
        druidDataSource.setProxyFilters(proxyFilters);
        return druidDataSource;
    }

    /**
     * Routing data source over master/slave.
     *
     * @Primary marks the default candidate when several beans of one type exist,
     * instead of letting @Autowired fail.
     * @Qualifier injects by bean name when several beans share a type (here: two
     * DataSource instances).
     */
    @Bean
    public DataSourceSelector dataSourceSelector(@Qualifier("master") DataSource master,
        @Qualifier("slave") DataSource slave) {
        Map<Object, Object> targetDataSources = new HashMap<>();
        targetDataSources.put(DynamicDataSourceHolder.DB_MASTER, master);
        targetDataSources.put(DynamicDataSourceHolder.DB_SLAVE, slave);
        DataSourceSelector dataSource = new DataSourceSelector();
        // setTargetDataSources / setDefaultTargetDataSource are inherited from
        // AbstractRoutingDataSource; the master is the safe default (read + write).
        dataSource.setTargetDataSources(targetDataSources);
        dataSource.setDefaultTargetDataSource(master);
        return dataSource;
    }

    /**
     * Apollo config load order:
     * lhc_db -> cloud_db -> device_redis -> mqtt_redis -> mqtt -> kafka_producer -> kafka_consumer
     */
    @Bean(name = "dbFirstPrimarySqlSessionFactory")
    @Primary
    public SqlSessionFactory createSqlSessionFactory(@Qualifier("dataSourceSelector") DataSource dataSource) {
        log.info("=================[apollo config init] order ->: 2, config ->: cloud_db");
        log.info("----------init sqlSessionFactory !!! ");
        SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
        sqlSessionFactoryBean.setDataSource(dataSource);
        PathMatchingResourcePatternResolver resource = new PathMatchingResourcePatternResolver();
        try {
            org.apache.ibatis.session.Configuration configuration = new org.apache.ibatis.session.Configuration();
            // Map snake_case columns (create_time) to camelCase properties (createTime).
            configuration.setMapUnderscoreToCamelCase(true);
            sqlSessionFactoryBean.setConfiguration(configuration);
            sqlSessionFactoryBean.setMapperLocations(resource.getResources("classpath*:/mapper/dbfirst/**/*.xml"));
            // The interceptor decides master vs slave per statement.
            sqlSessionFactoryBean.setPlugins(new Interceptor[]{new DateSourceSelectInterceptor()});
            return sqlSessionFactoryBean.getObject();
        } catch (Exception e) {
            log.error("init SqlSessionFactory failure! ", e);
            // Preserve the cause instead of the original bare RuntimeException.
            throw new IllegalStateException("init SqlSessionFactory failure!", e);
        }
    }

    @Bean
    public SqlSessionTemplate createSqlSessionTemplate(
        @Qualifier("dbFirstPrimarySqlSessionFactory") SqlSessionFactory sqlSessionFactory) {
        log.info("----------init sqlSessionTemplate !!! ");
        return new SqlSessionTemplate(sqlSessionFactory);
    }

    @Bean(name = "dbFirstTransactionManager")
    public DataSourceTransactionManager transactionManager(
        @Qualifier("dataSourceSelector") DataSource primaryDataSource) throws SQLException {
        // Custom manager pins the routing key to the master before a transaction begins.
        return new HlzDataSourceTransactionManager(primaryDataSource);
    }

    /** Wall (SQL firewall) filter configured to allow multi-statement execution. */
    public WallFilter createWallFilter() {
        WallConfig wallConfig = new WallConfig();
        // Allow several statements in one execution.
        wallConfig.setMultiStatementAllow(true);
        wallConfig.setNoneBaseStatementAllow(true);
        WallFilter wallFilter = new WallFilter();
        wallFilter.setConfig(wallConfig);
        return wallFilter;
    }
}
3. 定义数据源key的上下文(通过ThreadLocal)
package com.hlz.dao.config;
/**
 * Thread-bound holder for the data-source routing key ("master" / "slave").
 * Static utility — not instantiable.
 */
public class DynamicDataSourceHolder {

    /** Stores the key per thread; ThreadLocal guarantees thread safety. Made final. */
    private static final ThreadLocal<String> CONTEXT_HOLDER = new ThreadLocal<>();

    /** Routing key of the master (read/write) database. */
    public static final String DB_MASTER = "master";

    /** Routing key of the slave (read-only) database. */
    public static final String DB_SLAVE = "slave";

    private DynamicDataSourceHolder() {
        // utility class — prevent instantiation
    }

    /**
     * Returns the routing key bound to the current thread.
     *
     * @return the bound key, or {@link #DB_MASTER} when none is set (the master
     *         supports both reads and writes, so it is the safe default)
     */
    public static String getDataSourceType() {
        String db = CONTEXT_HOLDER.get();
        return db == null ? DB_MASTER : db;
    }

    /**
     * Binds a routing key to the current thread.
     *
     * @param s the key, normally {@link #DB_MASTER} or {@link #DB_SLAVE}
     */
    public static void setDataSourceType(String s) {
        CONTEXT_HOLDER.set(s);
    }

    /** Removes the key from the current thread (avoids ThreadLocal leaks on pooled threads). */
    public static void clearDataSource() {
        CONTEXT_HOLDER.remove();
    }
}
4.设置key
package com.hlz.dao.config;
import java.util.Locale;
import java.util.Properties;
import java.util.regex.Pattern;
import org.apache.ibatis.cache.CacheKey;
import org.apache.ibatis.executor.Executor;
import org.apache.ibatis.executor.keygen.SelectKeyGenerator;
import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.mapping.MappedStatement;
import org.apache.ibatis.mapping.SqlCommandType;
import org.apache.ibatis.plugin.*;
import org.apache.ibatis.session.ResultHandler;
import org.apache.ibatis.session.RowBounds;
import org.springframework.transaction.support.TransactionSynchronizationManager;
/**
* @description 拦截数据库操作,根据sql判断是读还是写,选择不同的数据源
*/
@Intercepts({@Signature(type = Executor.class, method = "update", args = {MappedStatement.class, Object.class}),
@Signature(type = Executor.class, method = "query", args = {MappedStatement.class, Object.class, RowBounds.class, ResultHandler.class}),
@Signature(type = Executor.class, method = "query", args = {MappedStatement.class, Object.class, RowBounds.class, ResultHandler.class, CacheKey.class, BoundSql.class})})
public class DateSourceSelectInterceptor implements Interceptor {
/**
* 正则匹配 insert、delete、update操作
*/
private static final String REGEX = ".*insert\\\\u0020.*|.*delete\\\\u0020.*|.*update\\\\u0020.*";
@Override
public Object intercept(Invocation invocation) throws Throwable {
//判断当前操作是否有事务
boolean synchonizationActive = TransactionSynchronizationManager.isSynchronizationActive();
//获取执行参数
Object[] objects = invocation.getArgs();
MappedStatement ms = (MappedStatement) objects[0];
//默认设置使用主库
String lookupKey = DynamicDataSourceHolder.DB_MASTER;
if (!synchonizationActive) {
//读方法
if (ms.getSqlCommandType().equals(SqlCommandType.SELECT)) {
//selectKey为自增主键(SELECT LAST_INSERT_ID())方法,使用主库
if (ms.getId().contains(SelectKeyGenerator.SELECT_KEY_SUFFIX)) {
lookupKey = DynamicDataSourceHolder.DB_MASTER;
} else {
BoundSql boundSql = ms.getSqlSource().getBoundSql(objects[1]);
String sql = boundSql.getSql().toLowerCase(Locale.CHINA).replace("[\\t\\n\\r]", " ");
//如果是insert、delete、update操作 使用主库
if (sql.matches(REGEX)) {
lookupKey = DynamicDataSourceHolder.DB_MASTER;
} else {
//使用从库
lookupKey = DynamicDataSourceHolder.DB_SLAVE;
}
}
}
} else {
//一般使用事务的都是写操作,直接使用主库
lookupKey = DynamicDataSourceHolder.DB_MASTER;
}
//设置数据源
DynamicDataSourceHolder.setDataSourceType(lookupKey);
return invocation.proceed();
}
@Override
public Object plugin(Object target) {
if (target instanceof Executor) {
//如果是Executor(执行增删改查操作),则拦截下来
return Plugin.wrap(target, this);
} else {
return target;
}
}
@Override
public void setProperties(Properties properties) {
}
}
5.有事务的情况下设置默认key
package com.hlz.dao.config;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import javax.sql.DataSource;
/**
 * Transaction manager that pins the routing key to the master database before a
 * transaction begins, so every statement inside the transaction runs against
 * the read/write data source.
 */
public class HlzDataSourceTransactionManager extends DataSourceTransactionManager {
public HlzDataSourceTransactionManager(DataSource dataSource) {
super(dataSource);
}
@Override
protected void doBegin(Object transaction, TransactionDefinition definition) {
// Set the key BEFORE super.doBegin() obtains the connection — the routing
// data source reads this key when the connection is acquired, so the order
// of these two calls is load-bearing.
DynamicDataSourceHolder.setDataSourceType(DynamicDataSourceHolder.DB_MASTER);
super.doBegin(transaction, definition);
}
}
6.通过路由获取上下文中的key对应的数据源
package com.hlz.dao.config;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
/**
 * @description Routing data source extending AbstractRoutingDataSource: on each
 * connection acquisition Spring calls {@link #determineCurrentLookupKey()} and
 * resolves the returned key against the configured master/slave map.
 */
public class DataSourceSelector extends AbstractRoutingDataSource {

    /**
     * Returns the thread-bound routing key ("master" when none is set).
     * A commented-out stack-trace inspection that forced the master whenever
     * DataSourceTransactionManager appeared on the stack was dead code and has
     * been removed — HlzDataSourceTransactionManager.doBegin already pins the
     * master before a transaction starts.
     */
    @Override
    protected Object determineCurrentLookupKey() {
        return DynamicDataSourceHolder.getDataSourceType();
    }
}