Configuring multiple data sources in Java

Three databases: master (the primary library), back_one (slave 1), and back_two (slave 2).

1. Configure the three databases in application.yml

spring:
  datasource:
    driver-class-name: com.mysql.jdbc.Driver
    # primary (master) database
    master:
      jdbcUrl: jdbc:mysql://xxx:3306/master?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull&serverTimezone=GMT%2b8&allowMultiQueries=true
      username: xxx
      password: xxx
      name:
    # slave database 1
    slave:
      enabled: true  # toggle for this slave data source (off by default)
      jdbcUrl: jdbc:mysql://xxx:3306/back_one?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull&allowMultiQueries=true&serverTimezone=GMT%2b8
      username: xxx
      password: xxx
      name:
    # slave database 2
    slave2:
      enabled: true  # toggle for this slave data source (off by default)
      jdbcUrl: jdbc:mysql://xxx:3306/back_two?useUnicode=true&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull&allowMultiQueries=true&serverTimezone=GMT%2b8
      username: xxx
      password: xxx
      name:
    druid.maxActive: 150
    druid.initialSize: 5
    druid.minIdle: 5
    druid.maxWait: 60000
    druid.filters: stat
    druid.timeBetweenEvictionRunsMillis: 60000
    druid.minEvictableIdleTimeMillis: 300000
    druid.validationQuery: SELECT 1    # with multiple data sources use: select count(*) from dual
    druid.testWhileIdle: true
    druid.testOnBorrow: false
    druid.testOnReturn: false
    druid.poolPreparedStatements: false
    druid.maxPoolPreparedStatementPerConnectionSize: -1
    druid.timeBetweenLogStatsMillis: 0
    druid.keep-alive: true
    druid.connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000;druid.stat.logSlowSql=true

Note:

With a single data source: validationQuery: SELECT 1;

With multiple data sources: validationQuery: select count(*) from dual

2. Code

(1) DataSource

import java.lang.annotation.*;

@Target({ElementType.TYPE, ElementType.METHOD})  // allow method-level use as well; the aspect checks the method annotation first
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface DataSource {

    DataSourceType value() default DataSourceType.MASTER;

}

(2) DataSourceAspect

import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.After;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

import java.lang.reflect.Method;


@Aspect
@Order(1)
@Component
public class DataSourceAspect {

    @Pointcut("execution(public * com.xxx.mapper..*.*(..))")
    public void pointcut() {}

    @Before("pointcut()")
    public void doBefore(JoinPoint joinPoint)
    {
        Method method = ((MethodSignature)joinPoint.getSignature()).getMethod();
        DataSource dataSource = method.getAnnotation(DataSource.class);  //获取方法上的注解
        if(dataSource == null){
            Class<?> clazz= joinPoint.getTarget().getClass().getInterfaces()[0];
            dataSource =clazz.getAnnotation(DataSource.class);  //获取类上面的注解
            if(dataSource == null) return;
        }
        if (dataSource != null) {
            DynamicDataSourceContextHolder.setDataSourceType(dataSource.value().name());
        }
    }

    @After("pointcut()")
    public void after(JoinPoint point) {
        // clear the key set for this thread so the default data source is not affected
        DynamicDataSourceContextHolder.clearDataSourceType();
    }
}

(3) DataSourceConfig

import com.alibaba.druid.pool.DruidDataSource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.context.annotation.Primary;

import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

@Configuration
public class DataSourceConfig {

    // primary (master) data source
    @Value("${spring.datasource.master.jdbcUrl}")
    private String masterJdbcUrl;
    @Value("${spring.datasource.master.username}")
    private String masterUsername;
    @Value("${spring.datasource.master.password}")
    private String masterPassword;
    @Value("${spring.datasource.driver-class-name}")
    private String masterDriverClassName;
    @Value("${spring.datasource.master.name}")
    private String name;
    // slave data source 1
    @Value("${spring.datasource.slave.jdbcUrl}")
    private String slaveJdbcUrl1;
    @Value("${spring.datasource.slave.username}")
    private String slaveUsername1;
    @Value("${spring.datasource.slave.password}")
    private String slavePassword1;
    @Value("${spring.datasource.driver-class-name}")
    private String slaveDriverClassName1;
    @Value("${spring.datasource.slave.name}")
    private String slaveName;
    // slave data source 2
    @Value("${spring.datasource.slave2.jdbcUrl}")
    private String slaveJdbcUrl2;
    @Value("${spring.datasource.slave2.username}")
    private String slaveUsername2;
    @Value("${spring.datasource.slave2.password}")
    private String slavePassword2;
    @Value("${spring.datasource.driver-class-name}")
    private String slaveDriverClassName2;
    @Value("${spring.datasource.slave2.name}")
    private String slaveName2;
    // shared Druid pool settings
    @Value("${spring.datasource.druid.maxActive}")
    private int maxActive;
    @Value("${spring.datasource.druid.initialSize}")
    private int initialSize;
    @Value("${spring.datasource.druid.minIdle}")
    private int minIdle;
    @Value("${spring.datasource.druid.maxWait}")
    private int maxWait;
    @Value("${spring.datasource.druid.filters}")
    private String filters;
    @Value("${spring.datasource.druid.timeBetweenEvictionRunsMillis}")
    private int timeBetweenEvictionRunsMillis;
    @Value("${spring.datasource.druid.minEvictableIdleTimeMillis}")
    private int minEvictableIdleTimeMillis;
    @Value("${spring.datasource.druid.validationQuery}")
    private String validationQuery;
    @Value("${spring.datasource.druid.testWhileIdle}")
    private boolean testWhileIdle;
    @Value("${spring.datasource.druid.testOnBorrow}")
    private boolean testOnBorrow;
    @Value("${spring.datasource.druid.testOnReturn}")
    private boolean testOnReturn;
    @Value("${spring.datasource.druid.poolPreparedStatements}")
    private boolean poolPreparedStatements;
    @Value("${spring.datasource.druid.maxPoolPreparedStatementPerConnectionSize}")
    private int maxPoolPreparedStatementPerConnectionSize;
    @Value("${spring.datasource.druid.timeBetweenLogStatsMillis}")
    private int timeBetweenLogStatsMillis;
    @Value("${spring.datasource.druid.keep-alive}")
    private boolean keepAlive;
    @Value("${spring.datasource.druid.connectionProperties}")
    private String connectionProperties;


    @Bean
    public DataSource masterDataSource() {
        return generateDataSource(masterJdbcUrl,masterUsername,masterPassword,masterDriverClassName,name);
    }

    @Bean
    @ConditionalOnProperty(prefix = "spring.datasource.slave", name = "enabled", havingValue = "true")
    public DataSource slaveDataSource1() {
        return generateDataSource(slaveJdbcUrl1,slaveUsername1,slavePassword1,slaveDriverClassName1,slaveName);
    }

    @Bean
    @ConditionalOnProperty(prefix = "spring.datasource.slave2", name = "enabled", havingValue = "true")
    public DataSource slaveDataSource2() {
        return generateDataSource(slaveJdbcUrl2,slaveUsername2,slavePassword2,slaveDriverClassName2,slaveName2);
    }


    private DruidDataSource generateDataSource(String url,String username,String password,String driverClassName,String name){
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(url);
        datasource.setUsername(username);
        datasource.setPassword(password);
        datasource.setDriverClassName(driverClassName);
        datasource.setName(name);
        datasource.setInitialSize(initialSize);
        datasource.setMaxActive(maxActive);
        datasource.setMinIdle(minIdle);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        datasource.setKeepAlive(keepAlive);
        try {
            datasource.setFilters(filters);
        } catch (SQLException e) {
            System.err.println("druid configuration initialization filter: " + e);
        }
        datasource.setConnectionProperties(connectionProperties);
        datasource.setTimeBetweenLogStatsMillis(timeBetweenLogStatsMillis);
        return datasource;
    }

    @Bean(name = "dynamicDataSource")
    @DependsOn({"masterDataSource", "slaveDataSource1", "slaveDataSource2"})  // without this, startup fails with: "The dependencies of some of the beans in the application context form a cycle"
    @Primary
    public DynamicDataSource dataSource(DataSource masterDataSource, DataSource slaveDataSource1, DataSource slaveDataSource2) {
        Map<Object, Object> targetDataSources = new HashMap<>();
        targetDataSources.put(DataSourceType.MASTER.name(), masterDataSource);
        targetDataSources.put(DataSourceType.BACKONE.name(), slaveDataSource1);
        targetDataSources.put(DataSourceType.BACKTWO.name(), slaveDataSource2);
        return new DynamicDataSource(masterDataSource, targetDataSources);
    }

}
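
Optionally, Spring's transaction manager can be pointed at the routing data source. @Primary already makes dynamicDataSource the default candidate for Spring Boot's auto-configuration, but an explicit bean makes the wiring visible. Below is a minimal sketch, not part of the original setup; the class name is illustrative. Keep in mind that the target is resolved when the transaction obtains its connection, so the data source key must be set before the transaction starts.

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;

@Configuration
public class TransactionConfig {

    // Bind the transaction manager to the routing data source so @Transactional methods
    // run against whichever target (MASTER/BACKONE/BACKTWO) is selected for the current thread.
    @Bean
    public DataSourceTransactionManager transactionManager(DynamicDataSource dynamicDataSource) {
        return new DataSourceTransactionManager(dynamicDataSource);
    }
}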

(4) DataSourceType

public enum DataSourceType {
    /**
     * Primary (master) library
     */
    MASTER,

    /**
     * Slave library 1 (back_one)
     */
    BACKONE,

    /**
     * Slave library 2 (back_two)
     */
    BACKTWO;
}

(5) DynamicDataSource

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

import javax.sql.DataSource;
import java.util.Map;

public class DynamicDataSource extends AbstractRoutingDataSource {

    public DynamicDataSource(DataSource defaultTargetDataSource, Map<Object, Object> targetDataSources) {
        super.setDefaultTargetDataSource(defaultTargetDataSource);
        super.setTargetDataSources(targetDataSources);
        // afterPropertiesSet() copies targetDataSources into the internal resolvedDataSources map
        super.afterPropertiesSet();
    }

    /**
     * Return the lookup key that selects the data source for the current thread
     */
    @Override
    protected Object determineCurrentLookupKey() {
        return DynamicDataSourceContextHolder.getDataSourceType();
    }

}

(6) DynamicDataSourceContextHolder

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DynamicDataSourceContextHolder {
    public static final Logger log = LoggerFactory.getLogger(DynamicDataSourceContextHolder.class);

    /**
     * Backed by a ThreadLocal: each thread holds its own copy of the variable,
     * so one thread changing its copy never affects the value seen by other threads.
     */
    private static final ThreadLocal<String> CONTEXT_HOLDER = new ThreadLocal<>();

    /**
     * Set the data source key for the current thread
     */
    public static void setDataSourceType(String dataSourceType){
        log.info("Switching to the {} data source", dataSourceType);
        CONTEXT_HOLDER.set(dataSourceType);
    }

    /**
     * Get the data source key for the current thread
     */
    public static String getDataSourceType(){
        return CONTEXT_HOLDER.get();
    }

    /**
     * Clear the data source key for the current thread
     */
    public static void clearDataSourceType(){
        CONTEXT_HOLDER.remove();
    }

}
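
The holder can also be used directly when a single service method needs to switch sources programmatically instead of through the @DataSource annotation. Below is a hypothetical sketch; ReportService, ReportMapper, Report, and selectAll are illustrative names that are not part of the original code.

import java.util.List;

import org.springframework.stereotype.Service;

@Service
public class ReportService {

    private final ReportMapper reportMapper;  // hypothetical mapper interface

    public ReportService(ReportMapper reportMapper) {
        this.reportMapper = reportMapper;
    }

    public List<Report> loadFromBackOne() {
        // Route the current thread to slave 1 before touching the mapper.
        DynamicDataSourceContextHolder.setDataSourceType(DataSourceType.BACKONE.name());
        try {
            return reportMapper.selectAll();
        } finally {
            // Always clear the key so later calls fall back to the master data source.
            DynamicDataSourceContextHolder.clearDataSourceType();
        }
    }
}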

3. Usage

(1) Querying slave 1

/**
 * Routes every method in this mapper to slave 1 (back_one)
 */
@Component
@DataSource(value = DataSourceType.BACKONE)
public interface TestOneMapper {

}

(2) Querying slave 2

/**
 * Routes every method in this mapper to slave 2 (back_two)
 */
@Component
@DataSource(value = DataSourceType.BACKTWO)
public interface TestTwoMapper {

}
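
(3) Method-level override (hypothetical sketch)

Since the aspect checks the method annotation before falling back to the interface, and @Target above includes METHOD, a single method can override the interface-level choice. The sketch below is not from the original post; the mapper and method names are illustrative.

import java.util.List;
import java.util.Map;

import org.springframework.stereotype.Component;

/**
 * Hypothetical mapper: the interface defaults to slave 1, while one write method
 * is explicitly routed back to the master library.
 */
@Component
@DataSource(value = DataSourceType.BACKONE)
public interface OrderMapper {

    // Runs against back_one via the interface-level annotation
    List<Map<String, Object>> selectOrders();

    // The method-level annotation overrides the interface-level one
    @DataSource(value = DataSourceType.MASTER)
    int insertOrderLog(Map<String, Object> record);
}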
