Spring Boot 整合多数据源+Mybatis+druid

1.新建Spring Boot 项目,在pom.xml添加如下依赖

<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-web</artifactId>
		</dependency>

		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-test</artifactId>
			<scope>test</scope>
		</dependency>

		<!-- PostgreSQL JDBC driver -->
		<dependency>
			<groupId>org.postgresql</groupId>
			<artifactId>postgresql</artifactId>
			<scope>runtime</scope>
		</dependency>
		<!-- Spring JDBC support (DataSource / transaction infrastructure) -->
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-jdbc</artifactId>
		</dependency>
		<!-- MyBatis / Spring Boot integration -->
		<dependency>
			<groupId>org.mybatis.spring.boot</groupId>
			<artifactId>mybatis-spring-boot-starter</artifactId>
			<version>1.3.2</version>
		</dependency>
		<!-- AOP support -->
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-aop</artifactId>
		</dependency>

		<!-- Alibaba Druid connection pool (Spring Boot starter) -->
		<dependency>
			<groupId>com.alibaba</groupId>
			<artifactId>druid-spring-boot-starter</artifactId>
			<version>1.1.9</version>
		</dependency>
		<!-- PageHelper pagination plugin -->
		<dependency>
			<groupId>com.github.pagehelper</groupId>
			<artifactId>pagehelper-spring-boot-starter</artifactId>
			<version>1.2.5</version>
		</dependency>
		<!-- Jackson JSON serialization -->
		<dependency>
			<groupId>com.fasterxml.jackson.core</groupId>
			<artifactId>jackson-core</artifactId>
		</dependency>
		<dependency>
			<groupId>com.fasterxml.jackson.core</groupId>
			<artifactId>jackson-databind</artifactId>
		</dependency>
		<dependency>
			<groupId>com.fasterxml.jackson.datatype</groupId>
			<artifactId>jackson-datatype-joda</artifactId>
		</dependency>
		<dependency>
			<groupId>com.fasterxml.jackson.module</groupId>
			<artifactId>jackson-module-parameter-names</artifactId>
		</dependency>
		<!-- Atomikos JTA for distributed (multi-datasource) transactions -->
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-jta-atomikos</artifactId>
		</dependency>

 2.在application.yml文件中添加数据库配置以及服务器端口号等

server:
  port: 8060
spring:
  datasource:
    druid:
      # Data source 1 (HR) — bound by DataSourceHRConfig via
      # @ConfigurationProperties(prefix = "spring.datasource.druid.hr")
      hr:
        # Monitoring filters: "stat" feeds the SQL statistics page of the
        # Druid console (removing it disables SQL stats), "wall" is the SQL firewall
        filters: stat,wall
        driver-class-name: org.postgresql.Driver
        url: jdbc:postgresql://127.0.0.1:5432/pdl_hr_test?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true
        username: postgres
        password: 123456
        # Initial pool size
        initial-size: 50
        # Minimum number of idle connections
        min-idle: 50
        # Maximum number of active connections
        max-active: 200
        # Max wait (ms) when acquiring a connection
        max-wait: 60000
        # Interval (ms) between runs of the idle-connection eviction check
        time-between-eviction-runs-millis: 60000
        # Minimum time (ms) a connection may sit idle in the pool before eviction
        min-evictable-idle-time-millis: 30000
        # Statement used to validate connections
        validation-query: SELECT 'x'
        # Validate idle connections; connections that fail are removed from the pool
        test-while-idle: true
        # Do not validate on borrow/return — doing so hurts performance badly
        test-on-borrow: false
        test-on-return: false
        # PreparedStatement cache: true for Oracle, false for MySQL;
        # false is also recommended with heavy sharding
        pool-prepared-statements: false
        # PSCache size per connection (Oracle-related); Druid tolerates fairly high values
        max-pool-prepared-statement-per-connection-size: 20
      # Data source 2 (CRM)
      crm:
        filters: stat,wall
        driver-class-name: org.postgresql.Driver
        url: jdbc:postgresql://127.0.0.1:5432/pdl_crm?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true
        username: postgres
        password: 123456
        initial-size: 50
        min-idle: 50
        max-active: 200
        max-wait: 60000
        time-between-eviction-runs-millis: 60000
        min-evictable-idle-time-millis: 30000
        validation-query: SELECT 'x'
        test-while-idle: true
        test-on-borrow: false
        test-on-return: false
        pool-prepared-statements: false
        max-pool-prepared-statement-per-connection-size: 20


3.新建数据源配置类DataSourceCrmConfig和DataSourceHRConfig,读取application.yml中对应前缀的数据库配置

DataSourceHRConfig:

package com.pdl.config;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;

import javax.sql.DataSource;

@Configuration
@MapperScan(basePackages = "com.pdl.dao.hr", sqlSessionTemplateRef = "hrSqlSessionTemplate")
public class DataSourceHRConfig {

    /**
     * Primary (HR) data source, populated from the
     * {@code spring.datasource.druid.hr} properties in application.yml.
     */
    @Bean(name = "hrDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.druid.hr")
    @Primary
    public DataSource hrDataSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * SqlSessionFactory bound to the HR data source; loads the HR mapper
     * XML files from classpath:mapper/hr/.
     */
    @Bean(name = "hrSqlSessionFactory")
    @Primary
    public SqlSessionFactory hrSqlSessionFactory(@Qualifier("hrDataSource") DataSource dataSource) throws Exception {
        SqlSessionFactoryBean factoryBean = new SqlSessionFactoryBean();
        factoryBean.setDataSource(dataSource);
        factoryBean.setMapperLocations(
                new PathMatchingResourcePatternResolver().getResources("classpath:mapper/hr/*.xml"));
        return factoryBean.getObject();
    }

    /** Transaction manager for the HR data source. */
    @Bean(name = "hrTransactionManager")
    @Primary
    public DataSourceTransactionManager hrTransactionManager(@Qualifier("hrDataSource") DataSource dataSource) {
        return new DataSourceTransactionManager(dataSource);
    }

    /** SqlSessionTemplate injected into the mapper interfaces under com.pdl.dao.hr. */
    @Bean(name = "hrSqlSessionTemplate")
    @Primary
    public SqlSessionTemplate hrSqlSessionTemplate(@Qualifier("hrSqlSessionFactory") SqlSessionFactory sqlSessionFactory) {
        return new SqlSessionTemplate(sqlSessionFactory);
    }
}

 

DataSourceCrmConfig:

package com.pdl.config;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;

@Configuration
@MapperScan(basePackages = "com.pdl.dao.crm", sqlSessionTemplateRef = "crmSqlSessionTemplate")
public class DataSourceCrmConfig {

    /**
     * Secondary (CRM) data source, populated from the
     * {@code spring.datasource.druid.crm} properties in application.yml.
     */
    @Bean(name = "crmDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.druid.crm")
    public DruidDataSource crmDataSource() {
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * SqlSessionFactory bound to the CRM data source; loads the CRM mapper
     * XML files from classpath:mapper/crm/.
     */
    @Bean(name = "crmSqlSessionFactory")
    public SqlSessionFactory crmSqlSessionFactory(@Qualifier("crmDataSource") DruidDataSource dataSource) throws Exception {
        SqlSessionFactoryBean factoryBean = new SqlSessionFactoryBean();
        factoryBean.setDataSource(dataSource);
        factoryBean.setMapperLocations(
                new PathMatchingResourcePatternResolver().getResources("classpath:mapper/crm/*.xml"));
        return factoryBean.getObject();
    }

    /** Transaction manager for the CRM data source. */
    @Bean(name = "crmTransactionManager")
    public DataSourceTransactionManager crmTransactionManager(@Qualifier("crmDataSource") DruidDataSource dataSource) {
        return new DataSourceTransactionManager(dataSource);
    }

    /**
     * SqlSessionTemplate injected into the mapper interfaces under com.pdl.dao.crm.
     * (Method renamed from the copy-pasted "hrSqlSessionTemplate"; the bean name
     * was already explicit, so wiring is unchanged.)
     */
    @Bean(name = "crmSqlSessionTemplate")
    public SqlSessionTemplate crmSqlSessionTemplate(@Qualifier("crmSqlSessionFactory") SqlSessionFactory sqlSessionFactory) {
        return new SqlSessionTemplate(sqlSessionFactory);
    }
}

 

最关键的地方就是这块了,一层一层注入,首先创建DataSource,然后创建SqlSessionFactory再创建事务,最后包装到SqlSessionTemplate中。其中需要指定分库的mapper文件地址,以及分库dao层代码

@MapperScan(basePackages = "com.pdl.dao.hr",sqlSessionTemplateRef = "hrSqlSessionTemplate")

这块的注解就是指明了扫描dao层,并且给dao层注入指定的SqlSessionTemplate。所有@Bean都需要按照命名指定正确。

4.在DAO层新建 hr 和 crm 目录,创建AdminMapper和UserMapper接口,AdminMapper在hr目录下,UserMapper放在crm目录下

package com.pdl.dao.hr;

import org.apache.ibatis.annotations.Mapper;

import java.util.HashMap;
import java.util.List;
@Mapper
// MyBatis mapper for the HR database (pdl_hr_test); SQL lives in mapper/hr/AdminMapper.xml.
public interface AdminMapper {

    // Returns up to the first 10 rows of pdl_admin, one HashMap per row.
    // NOTE(review): raw HashMap generics kept for source compatibility with existing callers.
    List<HashMap> selectAdmin();

    // Inserts one pdl_admin row; the map must contain the keys
    // user_no, user_name, user_pw, stop_flag, emp_id (see AdminMapper.xml).
    void insert(HashMap params);



}
package com.pdl.dao.crm;

import com.pdl.domain.User;
import org.apache.ibatis.annotations.Mapper;

import java.util.HashMap;
import java.util.List;

@Mapper
// MyBatis mapper for the CRM database (pdl_crm); SQL lives in mapper/crm/UserMapper.xml.
public interface UserMapper {

    // Returns every row of pdl_crm_user mapped to com.pdl.domain.User.
    List<User> selectAllUser();

    // Inserts one pdl_crm_user row; the map must contain the keys
    // emp_id, emp_no, center_no, create_time, emp_name (see UserMapper.xml).
    void insert(HashMap params);


}

5.配置mapper.xml文件,在mapper文件夹下新建hr和crm目录,AdminMapper.xml在hr目录下,UserMapper.xml放在crm目录下

AdminMapper.xml:

<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<!-- SQL mapping for com.pdl.dao.hr.AdminMapper (HR data source). -->
<mapper namespace="com.pdl.dao.hr.AdminMapper" >

    <!-- First 10 admin rows (PostgreSQL limit/offset), one HashMap per row. -->
    <select id="selectAdmin"  resultType="HashMap">
        SELECT * FROM pdl_admin limit 10 offset 0
    </select>

    <!-- Insert one admin row from a map of column values. -->
    <insert id="insert" parameterType="HashMap">
        INSERT INTO public.pdl_admin(
	user_no, user_name, user_pw, stop_flag, emp_id)
	VALUES (#{user_no}, #{user_name}, #{user_pw}, #{stop_flag}, #{emp_id});
    </insert>

</mapper>

UserMapper.xml

<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<!-- SQL mapping for com.pdl.dao.crm.UserMapper (CRM data source). -->
<mapper namespace="com.pdl.dao.crm.UserMapper" >

    <!-- All CRM users, mapped to the User domain object. -->
    <select id="selectAllUser"  resultType="com.pdl.domain.User">
        SELECT * FROM pdl_crm_user
    </select>

    <!-- Insert one CRM user; create_time is cast to TIMESTAMP (PostgreSQL :: cast). -->
    <insert id="insert" parameterType="HashMap">
        INSERT INTO public.pdl_crm_user(
	emp_id, emp_no, center_no, create_time, emp_name)
	VALUES (#{emp_id}, #{emp_no}, #{center_no}, #{create_time}::TIMESTAMP , #{emp_name});
    </insert>

</mapper>

6.Druid连接池监控配置

package com.pdl.config;
 
import com.alibaba.druid.support.http.StatViewServlet;
import com.alibaba.druid.support.http.WebStatFilter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.boot.web.servlet.ServletRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
 
@Configuration
public class DruidConfig {

    /**
     * Exposes the Druid monitoring console (StatViewServlet) under /druid/*.
     */
    @Bean
    @ConditionalOnMissingBean
    public ServletRegistrationBean druidServlet(){
        ServletRegistrationBean registration =
            new ServletRegistrationBean(new StatViewServlet(), "/druid/*");
        // IP whitelist for the console.
        registration.addInitParameter("allow","192.168.1.1");
        // IP blacklist; when both match, deny wins over allow and the console
        // shows: "Sorry, you are not permitted to view this page."
        registration.addInitParameter("deny","127.0.0.1");
        // Console login credentials.
        registration.addInitParameter("loginUsername","admin");
        registration.addInitParameter("loginPassword","admin");
        // Allow resetting the collected statistics from the UI.
        registration.addInitParameter("resetEnable","true");
        return registration;
    }

    /**
     * Registers Druid's WebStatFilter on all URLs so web requests are
     * collected into the monitoring statistics; static assets and the
     * console itself are excluded.
     */
    @Bean
    @ConditionalOnMissingBean
    public FilterRegistrationBean filterRegistrationBean(){
        FilterRegistrationBean registration = new FilterRegistrationBean();
        registration.setFilter(new WebStatFilter());
        registration.addUrlPatterns("/*");
        registration.addInitParameter("exclusions", "*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*");
        return registration;
    }
}

7.在service层中调用dao

package com.pdl.service;

import com.pdl.dao.crm.UserMapper;
import com.pdl.domain.User;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Service
public class UserService {

    @Autowired
    private UserMapper userMapper;

    /**
     * Loads every CRM user and wraps the result for the API layer.
     *
     * @return map with "recordList" (the user list) and "total" (its size);
     *         raw-typed callers continue to compile against the parameterized return
     */
    public Map<String, Object> selectUser() {
        Map<String, Object> data = new HashMap<>();
        List<User> userList = userMapper.selectAllUser();
        data.put("recordList", userList);
        data.put("total", userList.size());
        return data;
    }
}

 

8. controller层调用

package com.pdl.controller;

import com.pdl.service.UserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;

import java.util.Map;

@RestController
public class UserController {

    @Autowired
    private UserService userService;

    /**
     * Handles /selectAllUser: returns all CRM users plus a total count,
     * serialized to JSON by @RestController (so @ResponseBody is not needed).
     * Handler made public — private mapping methods work against convention
     * and break under some proxying setups.
     */
    @RequestMapping("/selectAllUser")
    public Map selectAllUser() {
        return userService.selectUser();
    }
}

 

9.更改Application.java配置

SpringBoot 实现多数据源时,首先要将Spring Boot 自带的 DataSourceAutoConfiguration 禁用,因为它会读取application.properties(或本文使用的application.yml)文件的spring.datasource.*属性并自动配置单数据源。在@SpringBootApplication注解中添加exclude属性即可。

package com.pdl;

import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.web.servlet.ServletComponentScan;

@SpringBootApplication(
		// Disable Spring Boot's single-datasource auto-configuration; the two
		// Druid data sources are declared manually in DataSourceHRConfig / DataSourceCrmConfig.
		exclude = {DataSourceAutoConfiguration.class}
)
@ServletComponentScan
public class Application {

	// Standard Spring Boot entry point.
	public static void main(String[] args) {
		SpringApplication.run(Application.class, args);
	}

}

到目前为止多数据源已经配置成功,如有哪位小伙伴配置失败的可以给我发邮件1044560183@qq.com

  • 1
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值