解决Sharding-jdbc不支持sql问题
在很多项目中,我们可能只是对数据库中一张或两张表进行分表分库处理,其余表依然按照正常的处理逻辑。若此时所有表的操作都交给sharding处理的话,其一是效率会受到很大的影响,其二是sharding-jdbc不支持很多sql的写法。
在这里为了绕开sharding jdbc对数据源的管理,需要多配置一个数据源 dataSourceForSpring,该数据源不交给sharding-jdbc来管理(其实还是同一个数据库也可以是分开的),而是spring直接管理。
解决办法如下:
例:有两张表 t_user 和 t_goods,需要对 t_user 分库分表,t_goods 不需要。新建数据库 sharding-jdbc(t_user, t_goods) 和 sharding-jdbc2(t_user)。
1、数据源配置
application.properties
#从数据源
spring.shadingjdbc.datasource2.url=jdbc:mysql://localhost:3306/sharding-jdbc2?useUnicode=true&characterEncoding=UTF-8&zeroDateTimeBehavior=convertToNull&useSSL=false
spring.shadingjdbc.datasource2.username=root
spring.shadingjdbc.datasource2.password=XMKbnAd0k/ItgRzV53cOFto5fe8pzcFRz2D9bN41OBPGM7XF27XQSu99Vi3B6vJHQsngtgBWZiOlKTZUrTtEnw==
#主数据源
spring.shadingjdbc.datasource.url=jdbc:mysql://localhost:3306/sharding-jdbc?useUnicode=true&characterEncoding=utf8&allowMultiQueries=true&useServerPrepStmts=true&cachePrepStmts=true
spring.shadingjdbc.datasource.username=root
spring.shadingjdbc.datasource.password=XMKbnAd0k/ItgRzV53cOFto5fe8pzcFRz2D9bN41OBPGM7XF27XQSu99Vi3B6vJHQsngtgBWZiOlKTZUrTtEnw==
#装载数据源类
/**
 * Holder for the Druid pool settings bound from the main sharding data-source
 * prefix. All fields are kept as String because
 * DruidDataSourceFactory.createDataSource(Map) parses String values itself.
 */
@ConfigurationProperties(prefix = SHARDING_JDBC_DATASOURCE)
@Data
public static class Druid {
    private String url;
    private String username;
    private String password;
    // Druid pool tuning knobs — names match DruidDataSourceFactory property keys.
    private String initialSize;
    private String minIdle;
    private String maxActive;
    private String maxWait;
    private String timeBetweenEvictionRunsMillis;
    private String minEvictableIdleTimeMillis;
    private String validationQuery;
    private String testWhileIdle;
    private String testOnBorrow;
    private String testOnReturn;
    private String poolPreparedStatements;
    private String maxPoolPreparedStatementPerConnectionSize;
    private String connectionProperties;
    private String filters;
    private String removeAbandoned;
    private String removeAbandonedTimeout;
    private String logAbandoned;

    /**
     * Converts the declared config fields into the String map expected by
     * {@code DruidDataSourceFactory.createDataSource(Map)}.
     *
     * Fixes over the naive version: synthetic fields injected by tooling
     * (e.g. JaCoCo's {@code $jacocoData}) and non-String fields are skipped
     * instead of causing a ClassCastException, and unset (null) properties
     * are omitted so the factory only sees values that were configured.
     *
     * @return map of Druid property name to configured value
     * @throws IllegalAccessException never in practice (fields are made accessible)
     */
    public Map<String, String> propertyToMap() throws IllegalAccessException {
        Map<String, String> propertyMap = new HashMap<>(1 << 5);
        for (Field field : this.getClass().getDeclaredFields()) {
            if (field.isSynthetic() || field.getType() != String.class) {
                continue; // not a configuration property
            }
            field.setAccessible(true);
            String value = (String) field.get(this);
            if (value != null) {
                propertyMap.put(field.getName(), value);
            }
        }
        return propertyMap;
    }
}
2、设置sharding数据源和spring数据源
//数据源设置
@Bean(name = "dataSource2")
@ConfigurationProperties(prefix = "spring.shadingjdbc.datasource2")
public DataSource dataSource2() {
    // Spring Boot binds spring.shadingjdbc.datasource2.* onto the built source.
    DataSourceBuilder builder = DataSourceBuilder.create();
    return builder.build();
}
/**
 * Data source 1 — handed to Sharding-JDBC for sharded table access.
 *
 * @param druidConfig Druid pool settings bound from the main prefix
 * @return a Druid-backed DataSource
 */
@Bean(name = "dataSource1")
public DataSource dataSource1(DataSourceConfig.Druid druidConfig) {
    try {
        return DruidDataSourceFactory.createDataSource(druidConfig.propertyToMap());
    } catch (Exception e) {
        // Preserve the cause: the original dropped it, making pool/config
        // failures impossible to diagnose from the stack trace.
        throw new RuntimeException("数据源获取失败", e);
    }
}
/**
 * Main data source — managed directly by Spring, bypassing Sharding-JDBC,
 * so non-sharded tables keep full SQL support and avoid the parsing overhead.
 *
 * @param druidConfig Druid pool settings bound from the main prefix
 * @return a Druid-backed DataSource
 */
@Bean(name = "dataSourceForSpring")
public DataSource dataSourceForSpring(DataSourceConfig.Druid druidConfig) {
    try {
        return DruidDataSourceFactory.createDataSource(druidConfig.propertyToMap());
    } catch (Exception e) {
        // Preserve the cause: the original dropped it, making pool/config
        // failures impossible to diagnose from the stack trace.
        throw new RuntimeException("数据源获取失败", e);
    }
}
/**
 * Registers both physical data sources with Sharding-JDBC;
 * "dataSource1" is the default when no database rule matches.
 */
@Bean
public DataSourceRule dataSourceRule(@Qualifier("dataSource1") DataSource dataSource1,
                                     @Qualifier("dataSource2") DataSource dataSource2) {
    Map<String, DataSource> sources = new HashMap<>();
    sources.put("dataSource1", dataSource1);
    sources.put("dataSource2", dataSource2);
    return new DataSourceRule(sources, "dataSource1");
}
/**
 * Sharding rule for t_user: dynamic month-based table routing on the
 * "txdate" column plus odd/even database routing on "id".
 */
@Bean("shardingRule")
public ShardingRule shardingRule(DataSourceRule dataSourceRule) throws SQLException {
    // Physical table name (t_user_yyyyMM) is resolved at execution time,
    // hence dynamic(true).
    TableShardingStrategy monthStrategy = new TableShardingStrategy(
            "txdate", new DynamicSubTableByMouthAlgorithm("t_user_"));
    TableRule userTableRule = TableRule.builder("t_user")
            .tableShardingStrategy(monthStrategy)
            .dataSourceRule(dataSourceRule)
            .dynamic(true)
            .build();
    DatabaseShardingStrategy dbStrategy =
            new DatabaseShardingStrategy("id", new DynamicSubDatabaseByIdAlgorithm());
    return ShardingRule.builder()
            .dataSourceRule(dataSourceRule)
            .tableRules(Arrays.asList(userTableRule))
            .databaseShardingStrategy(dbStrategy)
            .build();
}
/**
 * Routing data source: exposes the Sharding-JDBC wrapper and the plain
 * Spring-managed source under one bean, switched per thread via
 * DatabaseContextHolder.
 */
@Bean(name = "dynamicDataSource")
public DynamicDataSource dataSource(@Qualifier("shardingRule") ShardingRule shardingRule,
                                    @Qualifier("dataSourceForSpring") DataSource dataSourceForSpring)
        throws SQLException {
    Map<Object, Object> targets = new HashMap<>();
    targets.put(DataSourceType.SHARDING_DATASOURCE,
            ShardingDataSourceFactory.createDataSource(shardingRule));
    targets.put(DataSourceType.SPRING_DATASOURCE, dataSourceForSpring);
    DynamicDataSource routing = new DynamicDataSource();
    routing.setTargetDataSources(targets);
    // When no type is bound to the thread, fall back to the plain source.
    routing.setDefaultTargetDataSource(dataSourceForSpring);
    return routing;
}
/**
 * Transaction manager bound to the routing data source, so a transaction
 * opens against whichever target the current thread has selected.
 * NOTE(review): "transactitonManager" is a typo for "transactionManager",
 * but renaming it would change the bean name — left as-is here.
 *
 * @param dataSource the routing data source
 * @return the JDBC transaction manager
 */
@Bean
public DataSourceTransactionManager transactitonManager(@Qualifier("dynamicDataSource") DynamicDataSource dataSource){
return new DataSourceTransactionManager(dataSource);
}
/**
 * MyBatis session factory built on the routing data source; mapper XML is
 * scanned from classpath*:mapper/*Mapper.xml.
 */
@Bean(name = "sqlSessionFactory")
public SqlSessionFactory sqlSessionFactory(@Qualifier("dynamicDataSource") DynamicDataSource dataSource) throws Exception {
    SqlSessionFactoryBean factoryBean = new SqlSessionFactoryBean();
    factoryBean.setDataSource(dataSource);
    PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
    factoryBean.setMapperLocations(resolver.getResources("classpath*:mapper/*Mapper.xml"));
    return factoryBean.getObject();
}
/**
 * Primary SqlSessionTemplate used by mapper beans; thread-safe wrapper
 * around the session factory.
 *
 * @param sqlSessionFactory the MyBatis session factory
 * @return the shared session template
 */
@Bean(name = "sqlSessionTemplate")
@Primary
public SqlSessionTemplate sqlSessionTemplate(@Qualifier("sqlSessionFactory") SqlSessionFactory sqlSessionFactory) throws Exception {
return new SqlSessionTemplate(sqlSessionFactory);
}
3、动态数据源
/**
 * Routing data source: each connection request is directed to the target
 * (sharding vs. plain Spring) selected by the thread-bound type in
 * {@link DatabaseContextHolder}.
 */
public class DynamicDataSource extends AbstractRoutingDataSource {
@Override
protected Object determineCurrentLookupKey() {
// May be null if no type was set; the routing source then uses its default target.
return DatabaseContextHolder.getDatabaseType();
}
}
/** Lookup keys for the routing data source: sharded vs. plain Spring-managed. */
public enum DataSourceType {
SHARDING_DATASOURCE,SPRING_DATASOURCE;
}
/**
 * Thread-local holder telling {@code DynamicDataSource} which target the
 * current thread should use.
 *
 * NOTE(review): the services set the type but never clear it, so on pooled
 * threads the last selection leaks into the next request. Prefer calling
 * {@link #clearDatabaseType()} in a finally block (or an AOP interceptor)
 * after each unit of work.
 */
public class DatabaseContextHolder {

    private static final ThreadLocal<DataSourceType> CONTEXT_HOLDER = new ThreadLocal<>();

    /** Binds the routing key to the current thread. */
    public static void setDatabaseType(DataSourceType type) {
        CONTEXT_HOLDER.set(type);
    }

    /** @return the routing key for the current thread, or null if unset. */
    public static DataSourceType getDatabaseType() {
        return CONTEXT_HOLDER.get();
    }

    /** Removes the binding, preventing stale state on reused pool threads. */
    public static void clearDatabaseType() {
        CONTEXT_HOLDER.remove();
    }
}
4、操作数据库
//分表service
/**
 * Service for the sharded t_user table: every operation first routes the
 * thread to the Sharding-JDBC data source.
 */
@Service
public class UserService {

    @Resource
    private UserMapper userMapper;

    /** Inserts one user through the sharded data source. */
    public void insert(UserModel userModel) {
        DatabaseContextHolder.setDatabaseType(DataSourceType.SHARDING_DATASOURCE);
        userMapper.save(userModel);
    }

    /**
     * Inserts users one at a time.
     * NOTE(review): each row is a separate statement; a true batch mapper
     * method would cut round-trips if volume matters.
     */
    public void batchInsert(List<UserModel> userModels) {
        DatabaseContextHolder.setDatabaseType(DataSourceType.SHARDING_DATASOURCE);
        // Collection.forEach — no need to open a stream for a pure side effect.
        userModels.forEach(userMapper::save);
    }

    /** Pages users through the sharded data source. */
    public List<UserModel> selectByPage(Map<String, String> params) {
        DatabaseContextHolder.setDatabaseType(DataSourceType.SHARDING_DATASOURCE);
        return userMapper.selectByPage(params);
    }
}
//非分表service
/**
 * Service for the non-sharded t_goods table: every operation routes the
 * thread to the plain Spring-managed data source, bypassing Sharding-JDBC.
 */
@Service
public class GoodsService {

    @Resource
    private GoodsMapper goodsMapper;

    /** Inserts one goods row via the plain data source. */
    public void insert(GoodsModel goodsModel) {  // was misleadingly named userModel
        DatabaseContextHolder.setDatabaseType(DataSourceType.SPRING_DATASOURCE);
        goodsMapper.save(goodsModel);
    }

    /** Inserts goods rows one at a time via the plain data source. */
    public void batchInsert(List<GoodsModel> goodsModels) {
        DatabaseContextHolder.setDatabaseType(DataSourceType.SPRING_DATASOURCE);
        // Collection.forEach — no need to open a stream for a pure side effect.
        goodsModels.forEach(goodsMapper::save);
    }

    /** Pages goods rows via the plain data source. */
    public List<GoodsModel> selectByPage(Map<String, String> params) {
        DatabaseContextHolder.setDatabaseType(DataSourceType.SPRING_DATASOURCE);
        return goodsMapper.selectByPage(params);
    }
}
5、分库分表逻辑类
//分库
/**
 * Routes rows to one of two databases by id parity: suffix = id % 2 + 1
 * ("1" or "2"), matched against the tail of the data-source name
 * (dataSource1 / dataSource2).
 *
 * NOTE(review): for negative ids Java's % yields -1, so the suffix becomes
 * "0" and doEqualSharding throws — confirm ids are always non-negative.
 */
public class DynamicSubDatabaseByIdAlgorithm implements SingleKeyDatabaseShardingAlgorithm<Integer> {

    /** Suffix ("1" or "2") a data-source name must end with for this id. */
    private static String suffixFor(int id) {
        return String.valueOf(id % 2 + 1);
    }

    @Override
    public String doEqualSharding(Collection<String> databaseNames, ShardingValue<Integer> shardingValue) {
        String suffix = suffixFor(shardingValue.getValue());
        for (String name : databaseNames) {
            if (name.endsWith(suffix)) {
                return name;
            }
        }
        // No configured data source matches the computed suffix.
        throw new IllegalArgumentException();
    }

    @Override
    public Collection<String> doInSharding(Collection<String> databaseNames, ShardingValue<Integer> shardingValue) {
        Collection<String> result = new LinkedHashSet<>(databaseNames.size());
        for (Integer value : shardingValue.getValues()) {
            String suffix = suffixFor(value);  // hoisted: computed once per value
            for (String name : databaseNames) {  // was misleadingly named tableName
                if (name.endsWith(suffix)) {
                    result.add(name);
                }
            }
        }
        return result;
    }

    @Override
    public Collection<String> doBetweenSharding(Collection<String> databaseNames, ShardingValue<Integer> shardingValue) {
        Collection<String> result = new LinkedHashSet<>(databaseNames.size());
        Range<Integer> range = (Range<Integer>) shardingValue.getValueRange();
        // Primitive int counter: the original boxed Integer with i++ autoboxed
        // on every iteration.
        for (int i = range.lowerEndpoint(); i <= range.upperEndpoint(); i++) {
            String suffix = suffixFor(i);
            for (String name : databaseNames) {
                if (name.endsWith(suffix)) {
                    result.add(name);
                }
            }
        }
        return result;
    }
}
//分表
/**
 * Month-based dynamic table sharding: physical table = tablePrefix + yyyyMM
 * of the sharding date (e.g. t_user_202401).
 * NOTE(review): "Mouth" is a typo for "Month"; kept to avoid breaking callers.
 */
@RequiredArgsConstructor
public class DynamicSubTableByMouthAlgorithm implements SingleKeyTableShardingAlgorithm<Date> {

    /** Prefix of the logical table, e.g. "t_user_". */
    private final String tablePrefix;

    @Override
    public String doEqualSharding(final Collection<String> availableTargetNames, final ShardingValue<Date> shardingValue) {
        return tablePrefix + getStringDate(shardingValue.getValue());
    }

    @Override
    public Collection<String> doInSharding(final Collection<String> availableTargetNames, final ShardingValue<Date> shardingValue) {
        Collection<String> result = new LinkedHashSet<>(shardingValue.getValues().size());
        for (Date value : shardingValue.getValues()) {
            result.add(tablePrefix + getStringDate(value));
        }
        return result;
    }

    /**
     * BUG FIX: the original computed the range but always returned an EMPTY
     * collection, so BETWEEN predicates on the sharding column matched no
     * table at all. Now walks month by month from the lower to the upper
     * endpoint, inclusive.
     */
    @Override
    public Collection<String> doBetweenSharding(final Collection<String> availableTargetNames, final ShardingValue<Date> shardingValue) {
        Collection<String> result = new LinkedHashSet<>(availableTargetNames.size());
        Range<Date> range = shardingValue.getValueRange();
        String lastMonth = getStringDate(range.upperEndpoint());
        java.util.Calendar cursor = java.util.Calendar.getInstance();
        cursor.setTime(range.lowerEndpoint());
        // Pin to the 1st so adding a month never skips a short month
        // (e.g. Jan 31 + 1 month clamping to Feb 28 then drifting).
        cursor.set(java.util.Calendar.DAY_OF_MONTH, 1);
        String month = getStringDate(cursor.getTime());
        while (month.compareTo(lastMonth) <= 0) {  // yyyyMM compares correctly as text
            result.add(tablePrefix + month);
            cursor.add(java.util.Calendar.MONTH, 1);
            month = getStringDate(cursor.getTime());
        }
        return result;
    }

    /**
     * Formats a date as yyyyMM. A new SimpleDateFormat per call is deliberate:
     * SimpleDateFormat is not thread-safe, so it must not be cached statically.
     */
    public static String getStringDate(Date date) {
        return new SimpleDateFormat("yyyyMM").format(date);
    }
}