Spring Boot 2.0 + JPA + Druid + log4jdbc: Multiple Data Sources with Read/Write Separation

I've recently been putting a framework together for fun and wanted to build read/write separation into it. After a day of digging through references I finally got it working, so I'm sharing the code here in the hope that it helps others.
Step 1: The configuration file

spring:
  thymeleaf:
    prefix: classpath:/templates/  
    suffix: .html
  jpa:
    show-sql: true
    hibernate:
      ddl-auto: none
  datasource:
    primary:
      username: huang
      password: root
      url: jdbc:log4jdbc:mysql://localhost:3306/springboot?useUnicode=true&characterEncoding=UTF-8&useSSL=false
      driver-class-name: net.sf.log4jdbc.DriverSpy
    secondary:
      username: huang
      password: root
      url: jdbc:log4jdbc:mysql://localhost:3306/springboot2?useUnicode=true&characterEncoding=UTF-8&useSSL=false
      driver-class-name: net.sf.log4jdbc.DriverSpy
    type: com.alibaba.druid.pool.DruidDataSource

Step 2: Create a @Configuration class (this plays the role that an XML configuration file does in classic Spring)

import com.alibaba.druid.pool.DruidDataSource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;

import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

/**
 * @author huanglin wrote on 2017/10/23.
 * Data source connection configuration.
 */
@ServletComponentScan
@Configuration
public class DruidDBConfig{
    private Logger logger = LoggerFactory.getLogger(DruidDBConfig.class);

    @Value("${spring.datasource.primary.url}")
    private String dbUrl1;

    @Value("${spring.datasource.primary.username}")
    private String username1;

    @Value("${spring.datasource.primary.password}")
    private String password1;

    @Value("${spring.datasource.secondary.username}")
    private String username2;

    @Value("${spring.datasource.secondary.password}")
    private String password2;

    @Value("${spring.datasource.secondary.url}")
    private String dbUrl2;

    @Value("net.sf.log4jdbc.DriverSpy")
    private String driverClassName;

    @Value("5")
    private int initialSize;

    @Value("5")
    private int minIdle;

    @Value("20")
    private int maxActive;

    @Value("60000")
    private int maxWait;

    /**
     * How often the pool checks for idle connections that should be closed, in milliseconds
     */
    @Value("60000")
    private int timeBetweenEvictionRunsMillis;
    /**
     * Minimum time a connection must sit idle in the pool before it can be evicted, in milliseconds
     */
    @Value("300000")
    private int minEvictableIdleTimeMillis;

    @Value("SELECT 1 FROM DUAL")
    private String validationQuery;

    @Value("true")
    private boolean testWhileIdle;

    @Value("false")
    private boolean testOnBorrow;

    @Value("false")
    private boolean testOnReturn;

    /**
     * Enable PSCache and set the PreparedStatement cache size per connection
     */
    @Value("true")
    private boolean poolPreparedStatements;

    @Value("20")
    private int maxPoolPreparedStatementPerConnectionSize;
    /**
     * Filters for the monitoring/statistics interceptors; without them the Druid console cannot aggregate SQL, and 'wall' is the SQL firewall
     */
    @Value("stat,wall,log4j")
    private String filters;
    /**
     * Use connectionProperties to enable mergeSql and slow-SQL logging
     */
    @Value("druid.stat.mergeSql=true;druid.stat.slowSqlMillis=500")
    private String connectionProperties;

    private DruidDataSource getDruidDataSource(String username, String password, String url) {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(url);
        datasource.setUsername(username);
        datasource.setPassword(password);
        datasource.setDriverClassName(driverClassName);
        //configuration
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        try {
            datasource.setFilters(filters);
        } catch (SQLException e) {
            logger.error("druid configuration initialization filter : {0}", e);
        }
        datasource.setConnectionProperties(connectionProperties);

        return datasource;
    }
    @Bean(name = "primaryDataSource")
    @Qualifier("primaryDataSource")
    @Primary
    public DataSource primarydataSource() {
        return getDruidDataSource(username1, password1, dbUrl1);
    }
    @Bean(name = "secondaryDataSource")
    @Qualifier("secondaryDataSource")
    public DataSource secondaryDataSource() {
        return getDruidDataSource(username2, password2, dbUrl2);
    }
    
    @Bean
    public DataSource dynamicDataSource() {
        DynamicRoutingDataSource dataSource = new DynamicRoutingDataSource();
        dataSource.setDefaultTargetDataSource(primarydataSource());
        Map<Object, Object> dataSourceMap = new HashMap<>(4);
        dataSourceMap.put(DataSourceKey.PRIMARYDATASOURCE, primarydataSource());
        dataSourceMap.put(DataSourceKey.SECONDARYDATASOURCE, secondaryDataSource());
        dataSource.setTargetDataSources(dataSourceMap);
        return dataSource;
    }
    /**
     * Entity manager factory
     * @return the JPA entity manager factory, backed by the dynamic (routing) data source
     */
    @Bean(name = "entityManagerFactory")
    public LocalContainerEntityManagerFactoryBean emf() {
        LocalContainerEntityManagerFactoryBean emf = new LocalContainerEntityManagerFactoryBean();
        emf.setDataSource(dynamicDataSource());
        // This must point at the package(s) containing your own entity classes
        emf.setPackagesToScan(new String[]{"dsb.app.*.model"});
        emf.setJpaVendorAdapter(new HibernateJpaVendorAdapter());
        return emf;
    }
  
    /**
     * Transaction manager
     * @return a JPA transaction manager bound to the entity manager factory above
     */
    @Bean(name = "transactionManager")
    public JpaTransactionManager transactionManager() {
        JpaTransactionManager tm = new JpaTransactionManager();
        EntityManagerFactory emFactory = emf().getNativeEntityManagerFactory();
        tm.setEntityManagerFactory(emFactory);
        return tm;
    }

}

Step 3: Create an enum of primary and secondary data source keys; you can have more than one replica database here (see the sketch after the enum)

public enum DataSourceKey {
    PRIMARYDATASOURCE,
    SECONDARYDATASOURCE
}
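
If you want more than one replica, the enum simply grows; a minimal sketch (SECONDARYDATASOURCE2 and its matching bean are made-up names, not part of the original code):

public enum DataSourceKey {
    PRIMARYDATASOURCE,
    SECONDARYDATASOURCE,
    // hypothetical extra replica; it also needs its own DataSource bean, registered in
    // DruidDBConfig.dynamicDataSource() via dataSourceMap.put(DataSourceKey.SECONDARYDATASOURCE2, ...)
    SECONDARYDATASOURCE2
}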

Step 4: The data source context holder (decides which database the current thread uses)


/**
 * @author huanglin
 * @version 1.0
 */
public class DynamicDataSourceContextHolder {
    private static final ThreadLocal<DataSourceKey> currentDataSource = new ThreadLocal<DataSourceKey>();

    /**
     * Clear the data source bound to the current thread
     */
    public static void clear() {
        currentDataSource.remove();
    }

    /**
     * Get the data source currently in use
     *
     * @return the key of the data source in use
     */
    public static DataSourceKey get() {
        return currentDataSource.get();
    }

    /**
     * Set the data source to use
     *
     * @param value the data source key to bind to the current thread
     */
    public static void set(DataSourceKey value) {
        currentDataSource.set(value);
    }

    /**
     * Route reads to the secondary (replica) database.
     * With more than one replica you could switch to any of them here.
     */
    public static void setSlave() {
        DynamicDataSourceContextHolder.set(DataSourceKey.SECONDARYDATASOURCE);
    }
}
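
Most of the time the AOP aspect in step 7 sets and clears the key for you, but the holder can also be driven by hand; a minimal sketch (runOnReplica is a hypothetical helper, not part of the original code):

    // Manually route a block of read-only work to the replica, then always unbind the key.
    public void runOnReplica(Runnable readOnlyWork) {
        DynamicDataSourceContextHolder.setSlave();  // bind the replica key to the current thread
        try {
            readOnlyWork.run();                     // any queries run here hit the secondary database
        } finally {
            DynamicDataSourceContextHolder.clear(); // always remove the key from the ThreadLocal
        }
    }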

Step 5: The routing data source (decides at runtime which actual database a connection comes from)

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

public class DynamicRoutingDataSource extends AbstractRoutingDataSource {
    private static final Logger logger = LoggerFactory.getLogger(DynamicRoutingDataSource.class);

    @Override
    protected Object determineCurrentLookupKey() {
        DataSourceKey key = DynamicDataSourceContextHolder.get();
        if (key == DataSourceKey.SECONDARYDATASOURCE) {
            logger.info("Current data source: secondary (replica)");
        } else if (key != null) {
            logger.info("Current data source: primary");
        }
        return key;
    }
}

Step 6: A custom annotation for picking the target data source

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface TargetDataSource {
    DataSourceKey dataSourceKey() default DataSourceKey.PRIMARYDATASOURCE;
}

Step 7: An AOP aspect that switches the data source for service methods carrying the custom annotation (methods without it are routed to the replica)

import org.apache.log4j.Logger;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.After;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

import dsb.app.common.databaseConfig.DataSourceKey;
import dsb.app.common.databaseConfig.DynamicDataSourceContextHolder;
import dsb.app.common.databaseConfig.TargetDataSource;

import java.lang.reflect.Method;

@Aspect
@Order(-1)
@Component
public class DynamicDataSourceAspect {
    private static final Logger LOG = Logger.getLogger(DynamicDataSourceAspect.class);
    @Pointcut("execution(public * dsb.app.*.serviceimpl.*.*(..))")
    public void pointCut() {
    }

    /**
     * Switch the data source before the annotated method runs
     * @param joinPoint        the join point
     * @param targetDataSource the annotation carrying the target data source key
     */
    @Before("@annotation(targetDataSource)")
    public void doBefore(JoinPoint joinPoint, TargetDataSource targetDataSource) {
        DataSourceKey dataSourceKey = targetDataSource.dataSourceKey();
        if (dataSourceKey == DataSourceKey.SECONDARYDATASOURCE) {
            LOG.info(String.format("设置数据源为  %s", DataSourceKey.SECONDARYDATASOURCE));
            DynamicDataSourceContextHolder.set(DataSourceKey.SECONDARYDATASOURCE);
        } else {
            LOG.info(String.format("使用默认数据源  %s", DataSourceKey.PRIMARYDATASOURCE));
            DynamicDataSourceContextHolder.set(DataSourceKey.PRIMARYDATASOURCE);
        }
    }
    /**
     * Clear the data source setting after the annotated method runs
     * @param joinPoint        the join point
     * @param targetDataSource the annotation carrying the target data source key
     */
    @After("@annotation(targetDataSource)")
    public void doAfter(JoinPoint joinPoint, TargetDataSource targetDataSource) {
        LOG.info(String.format("当前数据源  %s  执行清理方法", targetDataSource.dataSourceKey()));
        DynamicDataSourceContextHolder.clear();
    }

    /**
     * Also clear the key after every intercepted service method, so the
     * ThreadLocal value never leaks across pooled request threads
     */
    @After(value = "pointCut()")
    public void doAfterWithSlave(JoinPoint joinPoint) {
        DynamicDataSourceContextHolder.clear();
    }

    @Before(value = "pointCut()")
    public void doBeforeWithSlave(JoinPoint joinPoint) {
        MethodSignature methodSignature = (MethodSignature) joinPoint.getSignature();
        // get the Method object for the intercepted call
        Method method = methodSignature.getMethod();
        if (method.getDeclaringClass().isInterface()) { // the signature may belong to an interface
            try {
                // look up the method on the actual target class instead
                method = joinPoint.getTarget().getClass()
                        .getDeclaredMethod(joinPoint.getSignature().getName(), method.getParameterTypes());
            } catch (NoSuchMethodException e) {
                LOG.error("方法不存在!", e);
            }
        }
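        // no @TargetDataSource on this method: route the call to the secondary (read) database by default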
        if (null == method.getAnnotation(TargetDataSource.class)) {
            DynamicDataSourceContextHolder.setSlave();
        }
    }
}

Final step: Annotate your own service methods
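
A minimal sketch of what that can look like, assuming a package that matches the aspect's pointcut; UserService, UserServiceImpl, UserRepository and User are placeholder names, not part of the original code. Methods without the annotation are sent to the secondary (read) database by doBeforeWithSlave(), while methods marked with @TargetDataSource default to the primary database:

package dsb.app.user.serviceimpl; // hypothetical package, chosen to match dsb.app.*.serviceimpl.*.*(..)

import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import dsb.app.common.databaseConfig.TargetDataSource;

@Service
public class UserServiceImpl implements UserService {

    @Autowired
    private UserRepository userRepository; // placeholder Spring Data JPA repository

    // no annotation: the aspect routes this read to the secondary database
    @Override
    public List<User> findAll() {
        return userRepository.findAll();
    }

    // annotated: dataSourceKey defaults to PRIMARYDATASOURCE, so this write runs against the primary database
    @TargetDataSource
    @Override
    public User save(User user) {
        return userRepository.save(user);
    }
}

With that in place, plain read methods hit the replica and annotated write methods stay on the primary.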
