其实配置相当简单,仅供学习参考,如有不足请赐教。
下面给大家看一下主要的几个配置:
1.首先要把相关依赖包(poi等)导入到项目中
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
    <groupId>org.mybatis.spring.boot</groupId>
    <artifactId>mybatis-spring-boot-starter</artifactId>
    <version>2.1.1</version>
</dependency>
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>druid</artifactId>
    <version>1.1.6</version>
</dependency>
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>5.1.6</version>
</dependency>
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-thymeleaf</artifactId>
</dependency>
<dependency>
    <groupId>com.github.pagehelper</groupId>
    <artifactId>pagehelper</artifactId>
    <version>5.1.5</version>
</dependency>
2.dao层的结构及配置
数据源(一)DruidDatasourceConfig.java
package com.huifu.myeinsole.dal;

import com.alibaba.druid.pool.DruidDataSource;
import com.github.pagehelper.PageInterceptor;
import org.apache.ibatis.plugin.Interceptor;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.support.TransactionTemplate;

import javax.sql.DataSource;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Primary (polarDB) datasource configuration.
 *
 * <p>Wires a Druid connection pool, a MyBatis {@link SqlSessionFactory} restricted to this
 * datasource's mapper XML locations, and a dedicated transaction manager/template. Mapper
 * interfaces under {@code dal.mapper} and {@code dal.repository} are bound to this datasource
 * via the {@code sqlSessionTemplateRef} on {@link MapperScan}.
 *
 * @author 小A
 */
@Configuration
@EnableTransactionManagement
@MapperScan(basePackages = {"com.huifu.myeinsole.dal.mapper", "com.huifu.myeinsole.dal.repository"},
        sqlSessionTemplateRef = "sqlSessionTemplate")
public class DruidDatasourceConfig {

    // For MySQL Connector/J 5.x, change the default to: com.mysql.jdbc.Driver
    @Value("${druid.driverClass:com.mysql.cj.jdbc.Driver}")
    private String driverClass;

    @Value("${druid.jdbcUrl}")
    private String jdbcUrl;

    @Value("${druid.user}")
    private String user;

    @Value("${druid.password}")
    private String password;

    @Value("${druid.initialSize:3}")
    private int initialSize;

    @Value("${druid.minIdle:3}")
    private int minIdle;

    @Value("${druid.maxActive:10}")
    private int maxActive;

    @Value("${druid.maxWait:60000}")
    private long maxWait;

    @Value("${druid.timeBetweenEvictionRunsMillis:60000}")
    private long timeBetweenEvictionRunsMillis;

    @Value("${druid.minEvictableIdleTimeMillis:300000}")
    private long minEvictableIdleTimeMillis;

    @Value("${druid.keepAlive:true}")
    private boolean keepAlive;

    @Value("${druid.validationQuery:select 1 from dual}")
    private String validationQuery;

    @Value("${druid.testWhileIdle:true}")
    private boolean testWhileIdle;

    @Value("${druid.testOnBorrow:false}")
    private boolean testOnBorrow;

    @Value("${druid.testOnReturn:false}")
    private boolean testOnReturn;

    // Enable PSCache and set the PSCache size per connection.
    @Value("${druid.poolPreparedStatements:false}")
    private boolean poolPreparedStatements;

    @Value("${druid.maxPoolPreparedStatementPerConnectionSize:20}")
    private int maxPoolPreparedStatementPerConnectionSize;

    // Monitoring/stat filters; without them the monitor UI cannot aggregate SQL. 'wall' is the SQL firewall.
    @Value("${druid.filters:stat,wall,slf4j}")
    private String filters;

    // connectProperties turn on SQL merging and slow-SQL recording.
    @Value("${druid.connectionProperties:druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000}")
    private String connectionProperties;

    // Merge monitoring data of multiple DruidDataSource instances.
    @Value("${druid.useGlobalDataSourceStat:true}")
    private boolean useGlobalDataSourceStat;

    // Shared PageHelper interceptor, declared elsewhere as bean "masterPageInterceptor".
    @Autowired
    @Qualifier("masterPageInterceptor")
    PageInterceptor pageInterceptor;

    /**
     * Druid connection pool for the primary database.
     *
     * @return fully configured {@link DruidDataSource}; Spring calls init/close via the bean lifecycle
     */
    @Bean(initMethod = "init", destroyMethod = "close")
    public DataSource dataSource() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(jdbcUrl);
        datasource.setUsername(user);
        datasource.setPassword(password);
        datasource.setDriverClassName(driverClass);
        // Pool sizing and eviction configuration.
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setKeepAlive(keepAlive);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        datasource.setUseGlobalDataSourceStat(useGlobalDataSourceStat);
        try {
            datasource.setFilters(filters);
        } catch (SQLException e) {
            // Misconfigured filters should be visible, not silently dropped.
            System.err.println("druid configuration initialization filter: " + e);
        }
        datasource.setConnectionProperties(connectionProperties);
        return datasource;
    }

    /**
     * Thread-safe MyBatis session template bound to this datasource's factory.
     */
    @Bean(value = "sqlSessionTemplate")
    public SqlSessionTemplate sqlSessionTemplate(
            @Qualifier("sqlSessionFactory") SqlSessionFactory sqlSessionFactory) {
        return new SqlSessionTemplate(sqlSessionFactory);
    }

    /**
     * MyBatis session factory restricted to this datasource's mapper XML files.
     *
     * @throws Exception if the factory bean fails to build the {@link SqlSessionFactory}
     */
    @Bean(value = "sqlSessionFactory")
    public SqlSessionFactory sqlSessionFactory(
            @Qualifier("dataSource") DataSource dataSource) throws Exception {
        SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
        sqlSessionFactoryBean.setDataSource(dataSource);
        sqlSessionFactoryBean.setTypeAliasesPackage(
                "com.huifu.myeinsole.dal.mapper,com.huifu.myeinsole.dal.repository");
        // Each pool scans only its own mapper XML locations.
        String[] mapperLocations = {
                "classpath*:com/huifu/myeinsole/dal/mapper/**/*Mapper.xml",
                "classpath*:com/huifu/myeinsole/dal/repository/**/*.xml"
        };
        sqlSessionFactoryBean.setMapperLocations(resolveMapperLocations(mapperLocations));
        sqlSessionFactoryBean.setPlugins(new Interceptor[]{pageInterceptor});
        return sqlSessionFactoryBean.getObject();
    }

    /**
     * Expands classpath patterns into concrete mapper XML resources.
     *
     * @param mapperLocations ant-style classpath patterns; may be {@code null}
     * @return all matching resources (possibly empty, never {@code null})
     */
    public Resource[] resolveMapperLocations(String[] mapperLocations) {
        ResourcePatternResolver resourceResolver = new PathMatchingResourcePatternResolver();
        List<Resource> resources = new ArrayList<>();
        if (mapperLocations != null) {
            for (String mapperLocation : mapperLocations) {
                try {
                    Resource[] mappers = resourceResolver.getResources(mapperLocation);
                    resources.addAll(Arrays.asList(mappers));
                } catch (IOException e) {
                    // Best effort: a non-resolvable pattern is skipped, but report it instead of hiding it.
                    System.err.println("failed to resolve mapper location '" + mapperLocation + "': " + e);
                }
            }
        }
        return resources.toArray(new Resource[0]);
    }

    /**
     * Programmatic-transaction template for this datasource.
     */
    @Bean(value = "transactionTemplate")
    public TransactionTemplate transactionTemplate(
            @Qualifier("transactionManager") PlatformTransactionManager transactionManager) {
        TransactionTemplate transactionTemplate = new TransactionTemplate();
        transactionTemplate.setTransactionManager(transactionManager);
        return transactionTemplate;
    }

    /**
     * JDBC transaction manager for this datasource.
     */
    @Bean(value = "transactionManager")
    public PlatformTransactionManager transactionManager(
            @Qualifier("dataSource") DataSource dataSource) {
        DataSourceTransactionManager dataSourceTransactionManager = new DataSourceTransactionManager();
        dataSourceTransactionManager.setDataSource(dataSource);
        return dataSourceTransactionManager;
    }
}
数据源(二)DruidDatasource2Config.java
package com.huifu.myeinsole.dal;

import com.alibaba.druid.pool.DruidDataSource;
import com.github.pagehelper.PageInterceptor;
import org.apache.ibatis.plugin.Interceptor;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.support.TransactionTemplate;

import javax.sql.DataSource;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Secondary (MySQL transaction-database) datasource configuration.
 *
 * <p>Mirrors {@code DruidDatasourceConfig} for a second database: every bean here is named with
 * a {@code mysql} prefix so the two pools never collide, and mappers under
 * {@code dal.transRepository} are bound to this pool via {@code sqlSessionTemplateRef}.
 *
 * @author 小A
 */
@Configuration
@EnableTransactionManagement
@MapperScan(basePackages = {"com.huifu.myeinsole.dal.transRepository"},
        sqlSessionTemplateRef = "mysqlSqlSessionTemplate")
public class DruidDatasource2Config {

    // For MySQL Connector/J 5.x, change the default to: com.mysql.jdbc.Driver
    @Value("${druid.driverClass:com.mysql.cj.jdbc.Driver}")
    private String driverClass;

    // Connection settings are namespaced under druid.trans.* to keep the two datasources apart.
    @Value("${druid.trans.jdbcUrl}")
    private String jdbcUrl;

    @Value("${druid.trans.user}")
    private String user;

    @Value("${druid.trans.password}")
    private String password;

    @Value("${druid.initialSize:3}")
    private int initialSize;

    @Value("${druid.minIdle:3}")
    private int minIdle;

    @Value("${druid.maxActive:10}")
    private int maxActive;

    @Value("${druid.maxWait:60000}")
    private long maxWait;

    @Value("${druid.timeBetweenEvictionRunsMillis:60000}")
    private long timeBetweenEvictionRunsMillis;

    @Value("${druid.minEvictableIdleTimeMillis:300000}")
    private long minEvictableIdleTimeMillis;

    @Value("${druid.keepAlive:true}")
    private boolean keepAlive;

    @Value("${druid.validationQuery:select 1 from dual}")
    private String validationQuery;

    @Value("${druid.testWhileIdle:true}")
    private boolean testWhileIdle;

    @Value("${druid.testOnBorrow:false}")
    private boolean testOnBorrow;

    @Value("${druid.testOnReturn:false}")
    private boolean testOnReturn;

    // Enable PSCache and set the PSCache size per connection.
    @Value("${druid.poolPreparedStatements:false}")
    private boolean poolPreparedStatements;

    @Value("${druid.maxPoolPreparedStatementPerConnectionSize:20}")
    private int maxPoolPreparedStatementPerConnectionSize;

    // Monitoring/stat filters; without them the monitor UI cannot aggregate SQL. 'wall' is the SQL firewall.
    @Value("${druid.filters:stat,wall,slf4j}")
    private String filters;

    // connectProperties turn on SQL merging and slow-SQL recording.
    @Value("${druid.connectionProperties:druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000}")
    private String connectionProperties;

    // Merge monitoring data of multiple DruidDataSource instances.
    @Value("${druid.useGlobalDataSourceStat:true}")
    private boolean useGlobalDataSourceStat;

    // Shared PageHelper interceptor, declared elsewhere as bean "masterPageInterceptor".
    @Autowired
    @Qualifier("masterPageInterceptor")
    PageInterceptor pageInterceptor;

    /**
     * Druid connection pool for the transaction database.
     *
     * @return fully configured {@link DruidDataSource}; Spring calls init/close via the bean lifecycle
     */
    @Bean(value = "mysqlDataSource", initMethod = "init", destroyMethod = "close")
    public DataSource dataSource() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(jdbcUrl);
        datasource.setUsername(user);
        datasource.setPassword(password);
        datasource.setDriverClassName(driverClass);
        // Pool sizing and eviction configuration.
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setKeepAlive(keepAlive);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        datasource.setUseGlobalDataSourceStat(useGlobalDataSourceStat);
        try {
            datasource.setFilters(filters);
        } catch (SQLException e) {
            // Misconfigured filters should be visible, not silently dropped.
            System.err.println("druid configuration initialization filter: " + e);
        }
        datasource.setConnectionProperties(connectionProperties);
        return datasource;
    }

    /**
     * Thread-safe MyBatis session template bound to this datasource's factory.
     */
    @Bean(value = "mysqlSqlSessionTemplate")
    public SqlSessionTemplate sqlSessionTemplate(
            @Qualifier("mysqlSqlSessionFactory") SqlSessionFactory sqlSessionFactory) {
        return new SqlSessionTemplate(sqlSessionFactory);
    }

    /**
     * MyBatis session factory restricted to this datasource's mapper XML files.
     *
     * @throws Exception if the factory bean fails to build the {@link SqlSessionFactory}
     */
    @Bean(value = "mysqlSqlSessionFactory")
    public SqlSessionFactory sqlSessionFactory(
            @Qualifier("mysqlDataSource") DataSource dataSource) throws Exception {
        SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();
        sqlSessionFactoryBean.setDataSource(dataSource);
        sqlSessionFactoryBean.setTypeAliasesPackage("com.huifu.myeinsole.dal.transRepository");
        // Each pool scans only its own mapper XML locations.
        String[] mapperLocations = {
                "classpath*:com/huifu/myeinsole/dal/transRepository/*.xml"
        };
        sqlSessionFactoryBean.setMapperLocations(resolveMapperLocations(mapperLocations));
        sqlSessionFactoryBean.setPlugins(new Interceptor[]{pageInterceptor});
        return sqlSessionFactoryBean.getObject();
    }

    /**
     * Expands classpath patterns into concrete mapper XML resources.
     *
     * @param mapperLocations ant-style classpath patterns; may be {@code null}
     * @return all matching resources (possibly empty, never {@code null})
     */
    public Resource[] resolveMapperLocations(String[] mapperLocations) {
        ResourcePatternResolver resourceResolver = new PathMatchingResourcePatternResolver();
        List<Resource> resources = new ArrayList<>();
        if (mapperLocations != null) {
            for (String mapperLocation : mapperLocations) {
                try {
                    Resource[] mappers = resourceResolver.getResources(mapperLocation);
                    resources.addAll(Arrays.asList(mappers));
                } catch (IOException e) {
                    // Best effort: a non-resolvable pattern is skipped, but report it instead of hiding it.
                    System.err.println("failed to resolve mapper location '" + mapperLocation + "': " + e);
                }
            }
        }
        return resources.toArray(new Resource[0]);
    }

    /**
     * Programmatic-transaction template for this datasource.
     */
    @Bean(value = "mysqlTransactionTemplate")
    public TransactionTemplate transactionTemplate(
            @Qualifier("mysqlTransactionManager") PlatformTransactionManager transactionManager) {
        TransactionTemplate transactionTemplate = new TransactionTemplate();
        transactionTemplate.setTransactionManager(transactionManager);
        return transactionTemplate;
    }

    /**
     * JDBC transaction manager for this datasource.
     */
    @Bean(value = "mysqlTransactionManager")
    public PlatformTransactionManager transactionManager(
            @Qualifier("mysqlDataSource") DataSource dataSource) {
        DataSourceTransactionManager dataSourceTransactionManager = new DataSourceTransactionManager();
        dataSourceTransactionManager.setDataSource(dataSource);
        return dataSourceTransactionManager;
    }
}
需要注意四个点:
1.两个连接池指定各自的mapper文件扫描路径;
2.数据源配置区分开;
//数据源(一)@MapperScan(basePackages = {"com.huifu.myeinsole.dal.mapper","com.huifu.myeinsole.dal.repository"}, sqlSessionTemplateRef = "sqlSessionTemplate")
//数据源(二)@MapperScan(basePackages = {"com.huifu.myeinsole.dal.transRepository"}, sqlSessionTemplateRef = "mysqlSqlSessionTemplate")
包的路径可以多个哦
3.操作对应数据库时,修改对应的mapper接口及xml文件;
4.逻辑实现时@Autowired注入对应的mapper对象即可;
3.参数配置(此处用properties方式,也可用yml)
# druid datasource
druid.jdbcUrl=jdbc:mysql://xxxxx/实例名1?zeroDateTimeBehavior=convertToNull&serverTimezone=GMT%2B8&useSSL=false
druid.user=用户名
druid.password=密码
# druid datasource2
druid.trans.jdbcUrl=jdbc:mysql://xxxxx/实例名2?zeroDateTimeBehavior=convertToNull&serverTimezone=GMT%2B8&useSSL=false
druid.trans.user=用户名
druid.trans.password=密码
4.启动类配置自动扫描数据源配置
package com.huifu.myeinsole.application;import org.springframework.boot.SpringApplication;import org.springframework.boot.autoconfigure.EnableAutoConfiguration;import org.springframework.boot.autoconfigure.SpringBootApplication;import org.springframework.boot.autoconfigure.data.redis.RedisAutoConfiguration;import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;import org.springframework.boot.builder.SpringApplicationBuilder;import org.springframework.boot.web.support.SpringBootServletInitializer;import org.springframework.context.annotation.ComponentScan;import org.springframework.context.annotation.PropertySource;/** * @author 小A */@SpringBootApplication@ComponentScan("com.huifu.myeinsole")@PropertySource("classpath:springboot.properties")@EnableAutoConfiguration(exclude = {DataSourceAutoConfiguration.class})public class BootApplication extends SpringBootServletInitializer { public static void main(String[] args) { SpringApplication.run(new Object[]{BootApplication.class}, args); } @Override protected SpringApplicationBuilder configure(SpringApplicationBuilder application) { return application.sources(BootApplication.class); }}
若还需排除Redis自动配置,可写为:@EnableAutoConfiguration(exclude = {DataSourceAutoConfiguration.class, RedisAutoConfiguration.class})
Java开发相关文章将不定时发布更新,有兴趣的朋友请持续关注交流!
发文不易,如果本文对您有帮助请给个赞吧,谢谢。
重要的事情说三遍:关注》》关注》》关注