spring+mybatis 实现多数据源切换

13 篇文章 0 订阅
8 篇文章 0 订阅

背景

相信大家在开发工作中肯定遇到过一个应用可能需要连接两个或以上的数据源进行数据操作,又或者数据库单表数据量过大,导致查询很慢时,一般都会考虑分库分表,这时候也会涉及到多库数据源操作。下面讲解利用spring+mybatis 实现多数据源切换,话不多说直接上代码。

jdbc和log4j的配置

log4j.properties

#定义输出格式
ConversionPattern=%d %-5p [%t] %c - %m%n

log4j.rootLogger=DEBUG,Console
log4j.logger.com.cnblogs.lzrabbit=DEBUG
log4j.logger.org.springframework=ERROR
log4j.logger.org.mybatis=ERROR
log4j.logger.org.apache.ibatis=ERROR
log4j.logger.org.quartz=ERROR
log4j.logger.org.apache.axis2=ERROR
log4j.logger.org.apache.axiom=ERROR
log4j.logger.org.apache=ERROR
log4j.logger.httpclient=ERROR
#log4j.additivity.org.springframework=false
#Console 
log4j.appender.Console=org.apache.log4j.ConsoleAppender
log4j.appender.Console.Threshold=DEBUG  
log4j.appender.Console.Target=System.out  
log4j.appender.Console.layout=org.apache.log4j.PatternLayout  
log4j.appender.Console.layout.ConversionPattern=${ConversionPattern}
#log4j.appender.Console.encoding=UTF-8

#org.apache.log4j.DailyRollingFileAppender
log4j.appender.DailyFile=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DailyFile.DatePattern='.'yyyy-MM-dd'.log'
log4j.appender.DailyFile.File=${myApp.root}/logs/daily.log
log4j.appender.DailyFile.Append=true
log4j.appender.DailyFile.Threshold=DEBUG
log4j.appender.DailyFile.layout=org.apache.log4j.PatternLayout
log4j.appender.DailyFile.layout.ConversionPattern=${ConversionPattern}
log4j.appender.DailyFile.encoding=UTF-8

#    %c 输出日志信息所属的类的全名 
#    %d 输出日志时间点的日期或时间,默认格式为ISO8601,也可以在其后指定格式,比如:%d{yyy-MM-dd HH:mm:ss},输出类似:2002-10-18- 22:10:28
#    %f 输出日志信息所属的类的类名 
#    %l 输出日志事件的发生位置,即输出日志信息的语句处于它所在的类的第几行 
#    %m 输出代码中指定的信息,如log(message)中的message 
#    %n 输出一个回车换行符,Windows平台为“\r\n”,Unix平台为“\n” 
#    %p 输出优先级,即DEBUG,INFO,WARN,ERROR,FATAL。如果是调用debug()输出的,则为DEBUG,依此类推 
#    %r 输出自应用启动到输出该日志信息所耗费的毫秒数 
#    %t 输出产生该日志事件的线程名

jdbc.properties

#============================================================================
# MySQL
#============================================================================
jdbc.mysql.driver=com.mysql.jdbc.Driver
jdbc.mysql.url=jdbc:mysql://127.0.0.1:3306/test?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true
jdbc.mysql.username=root
jdbc.mysql.password=root

#============================================================================
# MS SQL Server
#============================================================================
#jdbc.sqlserver.driver=com.microsoft.sqlserver.jdbc.SQLServerDriver
#jdbc.sqlserver.url=jdbc:sqlserver://127.0.0.1:1433;database=test;
#jdbc.sqlserver.username=sa
#jdbc.sqlserver.password=sa

#============================================================================
# MS SQL Server (JTDS)
#============================================================================
jdbc.sqlserver.driver=net.sourceforge.jtds.jdbc.Driver
jdbc.sqlserver.url=jdbc:jtds:sqlserver://127.0.0.1:1433/test
jdbc.sqlserver.username=sa
jdbc.sqlserver.password=sa


#============================================================================
# 通用配置
#============================================================================
jdbc.initialSize=5
jdbc.minIdle=5
jdbc.maxIdle=20
jdbc.maxActive=100
jdbc.maxWait=100000
jdbc.defaultAutoCommit=false
jdbc.removeAbandoned=true
jdbc.removeAbandonedTimeout=600
jdbc.testWhileIdle=true
jdbc.timeBetweenEvictionRunsMillis=60000
jdbc.numTestsPerEvictionRun=20
jdbc.minEvictableIdleTimeMillis=300000

多数据源时Spring配置文件

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:context="http://www.springframework.org/schema/context"
       xmlns:aop="http://www.springframework.org/schema/aop"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
    http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
    http://www.springframework.org/schema/context
    http://www.springframework.org/schema/context/spring-context-3.0.xsd
      http://www.springframework.org/schema/aop
          http://www.springframework.org/schema/aop/spring-aop-3.0.xsd">
    <bean id="propertyConfigurer" class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
        <property name="location" value="classpath:jdbc.properties"/>
    </bean>
    <bean id="sqlServerDataSource" class="org.apache.commons.dbcp.BasicDataSource" destroy-method="close">
        <property name="driverClassName" value="${jdbc.sqlserver.driver}"/>
        <property name="url" value="${jdbc.sqlserver.url}"/>
        <property name="username" value="${jdbc.sqlserver.username}"/>
        <property name="password" value="${jdbc.sqlserver.password}"/>
        <property name="initialSize" value="${jdbc.initialSize}"/>
        <property name="minIdle" value="${jdbc.minIdle}"/>
        <property name="maxIdle" value="${jdbc.maxIdle}"/>
        <property name="maxActive" value="${jdbc.maxActive}"/>
        <property name="maxWait" value="${jdbc.maxWait}"/>
        <property name="defaultAutoCommit" value="${jdbc.defaultAutoCommit}"/>
        <property name="removeAbandoned" value="${jdbc.removeAbandoned}"/>
        <property name="removeAbandonedTimeout" value="${jdbc.removeAbandonedTimeout}"/>
        <property name="testWhileIdle" value="${jdbc.testWhileIdle}"/>
        <property name="timeBetweenEvictionRunsMillis" value="${jdbc.timeBetweenEvictionRunsMillis}"/>
        <property name="numTestsPerEvictionRun" value="${jdbc.numTestsPerEvictionRun}"/>
        <property name="minEvictableIdleTimeMillis" value="${jdbc.minEvictableIdleTimeMillis}"/>
    </bean>
    <bean id="mySqlDataSource" class="org.apache.commons.dbcp.BasicDataSource" destroy-method="close">
        <property name="driverClassName" value="${jdbc.mysql.driver}"/>
        <property name="url" value="${jdbc.mysql.url}"/>
        <property name="username" value="${jdbc.mysql.username}"/>
        <property name="password" value="${jdbc.mysql.password}"/>
        <property name="initialSize" value="${jdbc.initialSize}"/>
        <property name="minIdle" value="${jdbc.minIdle}"/>
        <property name="maxIdle" value="${jdbc.maxIdle}"/>
        <property name="maxActive" value="${jdbc.maxActive}"/>
        <property name="maxWait" value="${jdbc.maxWait}"/>
        <property name="defaultAutoCommit" value="${jdbc.defaultAutoCommit}"/>
        <property name="removeAbandoned" value="${jdbc.removeAbandoned}"/>
        <property name="removeAbandonedTimeout" value="${jdbc.removeAbandonedTimeout}"/>
        <property name="testWhileIdle" value="${jdbc.testWhileIdle}"/>
        <property name="timeBetweenEvictionRunsMillis" value="${jdbc.timeBetweenEvictionRunsMillis}"/>
        <property name="numTestsPerEvictionRun" value="${jdbc.numTestsPerEvictionRun}"/>
        <property name="minEvictableIdleTimeMillis" value="${jdbc.minEvictableIdleTimeMillis}"/>
    </bean>
    <bean id="multipleDataSource" class="com.cnblogs.lzrabbit.MultipleDataSource">
        <property name="defaultTargetDataSource" ref="mySqlDataSource"/>
        <property name="targetDataSources">
            <map>
                <entry key="mySqlDataSource" value-ref="mySqlDataSource"/>
                <entry key="sqlServerDataSource" value-ref="sqlServerDataSource"/>
            </map>
        </property>
    </bean>
    <bean id="sqlSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
        <property name="dataSource" ref="multipleDataSource"/>
    </bean>

    <!-- mybatis.spring自动映射 -->
    <bean class="org.mybatis.spring.mapper.MapperScannerConfigurer">
        <property name="basePackage" value="com.cnblogs.lzrabbit"/>
    </bean>

    <!-- 自动扫描,多个包以 逗号分隔 -->
    <context:component-scan base-package="com.cnblogs.lzrabbit"/>
    <aop:aspectj-autoproxy/>
</beans>

Java代码编写

MultipleDataSource 多数据源配置类(注意:下面示例代码的包名为 com.xxx.gfw.pubfound,与上文 Spring 配置中 bean 的 class 值 com.cnblogs.lzrabbit.MultipleDataSource 不一致,实际使用时请保持两处包名一致)

package com.xxx.gfw.pubfound;

import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

/***
 * 
* Project Name:gfw-public-foundation-impl
* <p>Custom multi-datasource routing class. Spring calls
* {@link #determineCurrentLookupKey()} on every connection request and uses the
* returned key to select a target DataSource from the map configured in the
* Spring XML ({@code targetDataSources}); a {@code null} key falls back to the
* {@code defaultTargetDataSource}.
*
* @ClassName: MultipleDataSource
* @date 2018年5月18日  下午4:47:20
*
* @author youqiang.xiong
* @version 1.0
* @since
 */
public class MultipleDataSource extends AbstractRoutingDataSource {

    // InheritableThreadLocal so threads spawned by a request inherit the parent's
    // key. NOTE(review): with pooled worker threads an inherited/stale key can
    // outlive the request — call clearDataSourceKey() when the work is done.
    private static final ThreadLocal<String> dataSourceKey = new InheritableThreadLocal<String>();

    /**
     * Binds the given datasource key to the current thread.
     *
     * @param dataSource key of the target datasource; must match an entry key of
     *                   the {@code targetDataSources} map in the Spring config
     */
    public static void setDataSourceKey(String dataSource) {
        dataSourceKey.set(dataSource);
    }

    /**
     * Removes the key bound to the current thread, so subsequent lookups fall
     * back to the default datasource and pooled threads carry no stale key.
     */
    public static void clearDataSourceKey() {
        dataSourceKey.remove();
    }

    /**
     * @return the key bound to the current thread, or {@code null} to use the
     *         default target datasource
     */
    @Override
    protected Object determineCurrentLookupKey() {
        return dataSourceKey.get();
    }
}

自定义注解DataSourceType

package com.xxx.pubfound.aop.anntion;

import java.lang.annotation.*;

/***
 * 
* Project Name:gfw-base-common-service
* <p>Custom datasource-type annotation: marks which datasource key the annotated
* class should run against. The switching aspect reads this annotation at
* runtime (hence {@code RetentionPolicy.RUNTIME}) and binds the declared value
* to the current thread before the class's methods execute.
*
* @ClassName: DataSourceType
* @date 2018年5月18日  下午5:09:49
*
* @author youqiang.xiong
* @version 1.0
* @since
 */
@Target({ ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Documented 
public @interface DataSourceType {
    // Key of the datasource to switch to; defaults to the primary "dataSource".
    String value() default "dataSource";
}

两个service层分别加上DataSourceType 注解

ProvinceServiceImpl.java

package com.xxx.pubfound.service;

import java.util.ArrayList;
import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.xxx.pubfound.aop.anntion.DataSourceType;
import com.xxx.pubfound.dao.SysProvinceDao;
import com.xxx.pubfound.dto.SysProvinceDTO;
import com.xxx.pubfound.struct.SysProvinceListStruct;
import com.xxx.pubfound.struct.SysProvinceStruct;
import com.xxx.rpc.api.AbstractRpcService;

/***
 * 
* Project Name:gfw-public-foundation-impl
* <p>Province service implementation. Annotated with {@link DataSourceType}
* so the datasource-switching aspect routes its DAO calls to "gfwDataSource".
*
* @ClassName: ProvinceServiceImpl
* @date 2018年5月18日  下午6:29:35
*
* @author youqiang.xiong
* @version 1.0
* @since
 */
@DataSourceType(value="gfwDataSource")
@Service
public class ProvinceServiceImpl extends AbstractRpcService implements ProvinceService {

    @Autowired
    private SysProvinceDao sysProvinceDao;

    /**
     * Loads all provinces via the DAO and converts the DTO list into the
     * struct wrapper returned to RPC callers.
     */
    @Override
    public SysProvinceListStruct getProvinceList() {
        return beanToStruct(sysProvinceDao.getProvinceList());
    }

    /***
     * Wraps a list of province DTOs into the RPC struct type.
     *
     * @author youqiang.xiong
     * @date 2018年5月28日  下午3:31:42
     * @version v1.0
     * @since
     * @param provinceList
     *           province DTO list from the DAO
     * @return the wrapped struct list, or {@code null} when the input is
     *         {@code null} or empty (callers rely on this contract)
     */
    private SysProvinceListStruct beanToStruct(List<SysProvinceDTO> provinceList){

        if(provinceList == null || provinceList.isEmpty()){
            return null;
        }

        List<SysProvinceStruct> structs = new ArrayList<SysProvinceStruct>();
        for (int i = 0; i < provinceList.size(); i++) {
            SysProvinceDTO source = provinceList.get(i);
            SysProvinceStruct target = new SysProvinceStruct();
            target.provinceId = source.getProvinceId();
            target.provinceName = source.getProvinceName();
            structs.add(target);
        }

        return new SysProvinceListStruct(structs);
    }

}

DefaultExceptionCollector.java

package com.xxx.pubfound.service;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.alibaba.druid.pool.DruidDataSource;
import com.github.pagehelper.PageInfo;
import com.xxx.pubfound.aop.anntion.DataSourceType;
import com.xxx.pubfound.dao.PfExceptionLogDao;
import com.xxx.pubfound.dto.NotifyLevelEnum;
import com.xxx.pubfound.entity.PfExceptionLog;
import com.xxx.pubfound.struct.PagedPfExceptionLogStruct;
import com.xxx.pubfound.struct.PfExceptionLogStruct;
import com.xxx.rpc.api.AbstractRpcService;

/***
 * 
* Project Name:gfw-public-foundation-impl
* <p>Exception collection service: persists reported exceptions through the
* default datasource (see {@link DataSourceType}) and offers paged queries
* over the stored exception log.
*
* @ClassName: DefaultExceptionCollector
* @date 2018年5月28日  下午8:01:43
*
* @author youqiang.xiong
* @version 1.0
* @since
 */
@DataSourceType(value="dataSource")
@Service
public class DefaultExceptionCollector extends AbstractRpcService implements ExceptionCollector {

    @Autowired
    PfExceptionLogDao pfExceptionLogDao;

    @Autowired
    DruidDataSource dataSource;

    /**
     * Persists one exception report as a PfExceptionLog row.
     *
     * @param reqTime        request timestamp in epoch millis, stored as the exception time
     * @param exceptionClass fully-qualified exception class name
     * @param stackTrace     textual stack trace
     * @param resultCode     result code of the failed call
     * @param environment    environment tag (e.g. which deployment reported it)
     * @param nodeNameEn     reporting service name, stored as serviceName
     */
    @Override
    public void collect(long reqTime, String exceptionClass, String stackTrace, int resultCode, String environment,
            String nodeNameEn) {

        // FIXME: debug leftovers — these System.out prints should move to a logger.
        System.out.println(dataSource.getConnectProperties());
        PfExceptionLog exceptionLog = new PfExceptionLog();
        exceptionLog.setEnvironment(environment);
        exceptionLog.setExceptionClass(exceptionClass);
        exceptionLog.setExceptionTime(new Date(reqTime));
        exceptionLog.setResultCode(resultCode);
        exceptionLog.setServiceName(nodeNameEn);
        exceptionLog.setStackTrace(stackTrace);
        pfExceptionLogDao.insert(exceptionLog);
        System.out.println("Exception ex:" + exceptionClass);
        System.out.println("Exception ex:" + resultCode);
    }

    /**
     * Collects the exception (best-effort: persistence failures are logged and
     * swallowed so notification still proceeds) and then dispatches a
     * notification according to {@code level}.
     */
    @Override
    public void collectNotify(long reqTime, String exceptionClass, String stackTrace, int resultCode,
            String environment, String nodeNameEn, NotifyLevelEnum level) {
        try{
            this.collect(reqTime, exceptionClass, stackTrace, resultCode, environment, nodeNameEn);
        }catch(Exception ex){
            // Deliberate best-effort: a failed insert must not block notification.
            ex.printStackTrace();
        }
        // FIXME: apply a different strategy per notify level — start
        if(level.compareTo(NotifyLevelEnum.WARN) == 0){
            // send e-mail
        }else if(level.compareTo(NotifyLevelEnum.ERROR) == 0){
            // send e-mail; send SMS with 1/3 probability
        }else if(level.compareTo(NotifyLevelEnum.FATAL) == 0){
            // send e-mail AND send SMS
        }
        // FIXME: apply a different strategy per notify level — end
    }

    /**
     * Pages through the exception log.
     *
     * @param pageNo      1-based page number; values below 1 are treated as page 1
     * @param size        page size
     * @param serviceName optional service-name filter
     * @param beginTime   optional lower bound (epoch millis); null/non-positive means unbounded
     * @param endTime     optional upper bound (epoch millis); null/non-positive means unbounded
     *
     * @return one page of results plus total/page-count metadata
     */
    @Override
    public PagedPfExceptionLogStruct queryExceptionLogList(int pageNo, int size, String serviceName, Long beginTime, Long endTime) {
        Date beginTimeDate = beginTime == null || beginTime <= 0 ? null : new Date(beginTime);
        // BUGFIX: this guard previously tested beginTime, so a valid endTime was
        // discarded whenever beginTime was null or non-positive.
        Date endTimeDate = endTime == null || endTime <= 0 ? null : new Date(endTime);
        int offset = pageNo < 1 ? 0 : (pageNo - 1) * size;
        List<PfExceptionLog> list = pfExceptionLogDao.selectPfExceptionLogList(offset , size , serviceName , beginTimeDate , endTimeDate);
        List<PfExceptionLogStruct> structList = new ArrayList<>();
        // BUGFIX: was `list != null || !list.isEmpty()`, which throws an NPE when
        // the DAO returns null; `&&` is the intended guard.
        if (list != null && !list.isEmpty()) {
            for (PfExceptionLog pfExceptionLog : list) {
                structList.add(entityToStruct(pfExceptionLog));
            }
        }
        int total = pfExceptionLogDao.selectPfExceptionLogListCount(serviceName , beginTimeDate , endTimeDate);
        int pages = total % size == 0 ? total / size : total / size + 1;

        // structList is returned directly; the previous PageInfo wrapper only
        // echoed back the same list and contributed nothing else to the result.
        return new PagedPfExceptionLogStruct(pageNo, total, pages, structList);
    }

    /**
     * Maps one log entity to its RPC struct; returns {@code null} for {@code null}
     * input. A fresh SimpleDateFormat per call keeps this thread-safe.
     */
    private PfExceptionLogStruct entityToStruct(PfExceptionLog pfExceptionLog) {
        if (pfExceptionLog == null) {
            return null;
        }
        PfExceptionLogStruct pfExceptionLogStruct = new PfExceptionLogStruct();
        pfExceptionLogStruct.id = pfExceptionLog.getId();
        pfExceptionLogStruct.environment = pfExceptionLog.getEnvironment();
        pfExceptionLogStruct.exceptionClass = pfExceptionLog.getExceptionClass();
        pfExceptionLogStruct.exceptionTime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(pfExceptionLog.getExceptionTime());
        pfExceptionLogStruct.resultCode = pfExceptionLog.getResultCode();
        pfExceptionLogStruct.serviceName = pfExceptionLog.getServiceName();
        pfExceptionLogStruct.stackTrace = pfExceptionLog.getStackTrace();
        return pfExceptionLogStruct;
    }

    /**
     * Loads one exception-log entry by primary key.
     *
     * @param id exception log id
     *
     * @return the matching struct, or {@code null} when no row exists
     */
    @Override
    public PfExceptionLogStruct queryExceptionLogById(int id) {
        PfExceptionLog pfExceptionLog = pfExceptionLogDao.selectByPrimaryKey(id);
        return entityToStruct(pfExceptionLog);
    }
}

MultipleDataSourceAop.java多数据源自动切换切面类

package com.xxx.pubfound.aop;

import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.shangde.gfw.util.proxy.ProxyUtil;
import com.xxx.pubfound.MultipleDataSource;
import com.xxx.pubfound.aop.anntion.DataSourceType;

/***
 * 
* Project Name:gfw-public-foundation-impl
* <p>Datasource auto-switching aspect (intercepts every method of every class in
* com.xxx.pubfound.service — note: the service package, per the pointcut below).<br>
* If the target class carries {@link DataSourceType}, the annotation's value is
* bound as the thread's datasource key; otherwise the key is reset so the
* default datasource applies.
* @ClassName: MultipleDataSourceAspectAdvice
* @date 2018年5月18日  下午5:13:51
*
* @author youqiang.xiong
* @version 1.0
* @since
 */
@Component
@Aspect
public class MultipleDataSourceAop {

    private final Logger logger = LoggerFactory.getLogger(getClass());


    /***
     * 
     * Project Name gfw-public-foundation-impl
     * <p>
     * Runs before every method in com.xxx.pubfound.service and switches the
     * thread-bound datasource key according to the target class's
     * {@link DataSourceType} annotation.
     *
     * @author youqiang.xiong
     * @date 2018年5月18日 下午5:49:48
     * @version v1.0
     * @since
     * @param joinPoint
     *            the intercepted join point (the executing service method)
     * @throws Throwable
     *             if resolving the proxied target instance fails
     */
    @Before("execution(* com.xxx.pubfound.service.*.*(..))")
    public void changeDataSource(JoinPoint joinPoint) throws Throwable {

        // Unwrap the AOP proxy to reach the actual service instance being invoked.
        Object target = ProxyUtil.getTarget(joinPoint.getTarget());
        if(target.getClass().isAnnotationPresent(DataSourceType.class)){
            DataSourceType dataSourceType = target.getClass().getAnnotation(DataSourceType.class);
            String type = dataSourceType.value();
            logger.info("数据源切换至--->{}",type);
            MultipleDataSource.setDataSourceKey(type);
        }else{
            // BUGFIX: reset the key for un-annotated services. Previously a key
            // set by an earlier call on this (possibly pooled/inherited) thread
            // leaked through, routing this service to the wrong datasource; a
            // null key makes AbstractRoutingDataSource use the default target.
            MultipleDataSource.setDataSourceKey(null);
            logger.info("此{}不涉及数据源操作.",target.getClass());
        }

    }
}

以上就是通过spring aop 切面实现多数据源自动切换的代码和配置,请根据各自实际情况稍作修改.

评论 6
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值