在一个web工程中,经常需要访问多个不同的数据源,因此需要进行多数据源配置。以Springboot构建的web工程为例,进行了对数据源的配置。
首先在application-prod.properties文件中定义访问不同数据源的url和账户密码,如下(以两个数据源为例,sqlserver有些差别):
#要连接的数据源1 db1
spring.datasource.primary.url=jdbc\:mysql\://localhost\:3306/db1?useUnicode\=true&characterEncoding\=utf-8
spring.datasource.primary.username=root
spring.datasource.primary.password=******
spring.datasource.primary.driver-class-name=com.mysql.jdbc.Driver
#要连接的数据源2 db2
spring.datasource.secondary.url=jdbc\:mysql\://localhost\:3306/db2?useUnicode\=true&characterEncoding\=utf-8
spring.datasource.secondary.username=root
spring.datasource.secondary.password=******
spring.datasource.secondary.driver-class-name=com.mysql.jdbc.Driver
然后,分别对两个数据源进行配置,使用Druid做数据库连接池,并实现监控功能,因此需要引入Druid,并对其进行配置,配置成功后可以实现对整个工程访问数据库的监控。首先要做的当然是在pom文件中引入druid相关的jar,也就是添加druid依赖。注意Druid并不是Spring Boot自带的,druid-spring-boot-starter是阿里巴巴官方提供的Spring Boot集成包,依赖如下:
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>1.1.9</version>
</dependency>
由于Spring Boot内置支持的连接池是Hikari(2.x起的默认连接池)、Tomcat JDBC、DBCP2等,Druid不在其中,因此需要进行配置信息定制,在application-prod.properties文件中添加对druid的相关配置,如下:
spring.datasource.initialSize=5
spring.datasource.minIdle=5
spring.datasource.maxActive=20
spring.datasource.maxWait=60000
spring.datasource.timeBetweenEvictionRunsMillis=60000
spring.datasource.minEvictableIdleTimeMillis=300000
spring.datasource.validationQuery=SELECT 1 FROM DUAL
spring.datasource.testWhileIdle=true
spring.datasource.testOnBorrow=false
spring.datasource.testOnReturn=false
spring.datasource.poolPreparedStatements=true
spring.datasource.maxPoolPreparedStatementPerConnectionSize=20
# 注意:log4j过滤器要求classpath中存在log4j;若工程使用logback,应将log4j改为slf4j
spring.datasource.filters=stat,wall,log4j
spring.datasource.connectionProperties=druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
spring.mvc.static-path-pattern=/static/**
配置信息搞好以后,需要Config文件将properties文件中设置的参数和监控相关的参数在工程启动的时候应用起来,配置文件如下:
/**
 * Druid connection-pool configuration.
 *
 * <p>Binds the {@code spring.datasource.*} pool settings from
 * application-prod.properties, exposes the default (primary) Druid
 * {@link DataSource}, and registers Druid's built-in monitoring console
 * and web-stat filter.
 */
@Configuration // FIX: annotation was missing — without it none of the @Bean methods are registered.
public class DruidDBConfiguration {
    // --- Pool settings injected from application-prod.properties ---
    @Value("${spring.datasource.initialSize}")
    private int initialSize;
    @Value("${spring.datasource.minIdle}")
    private int minIdle;
    @Value("${spring.datasource.maxActive}")
    private int maxActive;
    @Value("${spring.datasource.maxWait}")
    private int maxWait;
    @Value("${spring.datasource.timeBetweenEvictionRunsMillis}")
    private int timeBetweenEvictionRunsMillis;
    @Value("${spring.datasource.minEvictableIdleTimeMillis}")
    private int minEvictableIdleTimeMillis;
    @Value("${spring.datasource.validationQuery}")
    private String validationQuery;
    @Value("${spring.datasource.testWhileIdle}")
    private boolean testWhileIdle;
    @Value("${spring.datasource.testOnBorrow}")
    private boolean testOnBorrow;
    @Value("${spring.datasource.testOnReturn}")
    private boolean testOnReturn;
    @Value("${spring.datasource.poolPreparedStatements}")
    private boolean poolPreparedStatements;
    @Value("${spring.datasource.maxPoolPreparedStatementPerConnectionSize}")
    private int maxPoolPreparedStatementPerConnectionSize;
    @Value("${spring.datasource.filters}")
    private String filters;
    // FIX: original placeholder was "{spring.datasource.connectionProperties}" (missing '$'),
    // which injected the literal string instead of the resolved property value.
    @Value("${spring.datasource.connectionProperties}")
    private String connectionProperties;

    public int getInitialSize() {
        return initialSize;
    }
    public void setInitialSize(int initialSize) {
        this.initialSize = initialSize;
    }
    public int getMinIdle() {
        return minIdle;
    }
    public void setMinIdle(int minIdle) {
        this.minIdle = minIdle;
    }
    public int getMaxActive() {
        return maxActive;
    }
    public void setMaxActive(int maxActive) {
        this.maxActive = maxActive;
    }
    public int getMaxWait() {
        return maxWait;
    }
    public void setMaxWait(int maxWait) {
        this.maxWait = maxWait;
    }
    public int getTimeBetweenEvictionRunsMillis() {
        return timeBetweenEvictionRunsMillis;
    }
    public void setTimeBetweenEvictionRunsMillis(int timeBetweenEvictionRunsMillis) {
        this.timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;
    }
    public int getMinEvictableIdleTimeMillis() {
        return minEvictableIdleTimeMillis;
    }
    public void setMinEvictableIdleTimeMillis(int minEvictableIdleTimeMillis) {
        this.minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;
    }
    public String getValidationQuery() {
        return validationQuery;
    }
    public void setValidationQuery(String validationQuery) {
        this.validationQuery = validationQuery;
    }
    public boolean isTestWhileIdle() {
        return testWhileIdle;
    }
    public void setTestWhileIdle(boolean testWhileIdle) {
        this.testWhileIdle = testWhileIdle;
    }
    public boolean isTestOnBorrow() {
        return testOnBorrow;
    }
    public void setTestOnBorrow(boolean testOnBorrow) {
        this.testOnBorrow = testOnBorrow;
    }
    public boolean isTestOnReturn() {
        return testOnReturn;
    }
    public void setTestOnReturn(boolean testOnReturn) {
        this.testOnReturn = testOnReturn;
    }
    public boolean isPoolPreparedStatements() {
        return poolPreparedStatements;
    }
    public void setPoolPreparedStatements(boolean poolPreparedStatements) {
        this.poolPreparedStatements = poolPreparedStatements;
    }
    public int getMaxPoolPreparedStatementPerConnectionSize() {
        return maxPoolPreparedStatementPerConnectionSize;
    }
    public void setMaxPoolPreparedStatementPerConnectionSize(int maxPoolPreparedStatementPerConnectionSize) {
        this.maxPoolPreparedStatementPerConnectionSize = maxPoolPreparedStatementPerConnectionSize;
    }
    public String getFilters() {
        return filters;
    }
    public void setFilters(String filters) {
        this.filters = filters;
    }
    public String getConnectionProperties() {
        return connectionProperties;
    }
    public void setConnectionProperties(String connectionProperties) {
        this.connectionProperties = connectionProperties;
    }

    /**
     * Default Druid-backed DataSource built from the pool settings above.
     *
     * @return the configured {@link DruidDataSource}
     */
    @Bean // declare as a Spring bean
    @Primary // preferred whenever several DataSource beans are candidates for injection
    public DataSource dataSource(){
        DruidDataSource datasource = new DruidDataSource();
        // Pool sizing and eviction configuration.
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        try {
            // setFilters throws SQLException when a filter alias (e.g. "log4j") cannot be resolved.
            datasource.setFilters(filters);
        } catch (SQLException e) {
            // Best-effort: keep startup going — the pool still works without the stat/wall filters.
            System.out.println("druid configuration initialization filter"+e);
            e.printStackTrace();
        }
        datasource.setConnectionProperties(connectionProperties);
        return datasource;
    }

    /**
     * Registers Druid's built-in monitoring console under {@code /druid/*}.
     *
     * <p>NOTE(review): the allow/deny IPs and the login credentials are
     * hard-coded — externalize them and change them per environment.
     */
    @Bean
    public ServletRegistrationBean<StatViewServlet> DruidStatViewServlet2(){
        ServletRegistrationBean<StatViewServlet> servletRegistrationBean =
                new ServletRegistrationBean<>(new StatViewServlet(),"/druid/*");
        servletRegistrationBean.addInitParameter("allow", "127.0.0.1"); // IP whitelist — MUST be adjusted per deployment
        servletRegistrationBean.addInitParameter("deny", "10.100.8.38"); // IP blacklist (deny wins over allow)
        servletRegistrationBean.addInitParameter("loginUsername", "administrator");
        servletRegistrationBean.addInitParameter("loginPassword", "123456");
        // Whether the console's "reset" button may clear the collected statistics.
        servletRegistrationBean.addInitParameter("resetEnable", "false");
        return servletRegistrationBean;
    }

    /**
     * Registers Druid's WebStatFilter so web requests are included in the
     * monitoring statistics.
     */
    @Bean
    public FilterRegistrationBean<WebStatFilter> druidStatFilter2(){
        FilterRegistrationBean<WebStatFilter> filterRegistrationBean =
                new FilterRegistrationBean<>(new WebStatFilter());
        filterRegistrationBean.addUrlPatterns("/*");
        // Resources excluded from stat collection (static assets and the console itself).
        filterRegistrationBean.addInitParameter("exclusions","*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*");
        return filterRegistrationBean;
    }
}
这样之后,druid可以作为数据库连接池使用,并且能够提供监控功能,接下来需要配置我们添加的两个数据源。两个配置类分别为PrimaryDataSourceConfig和secondaryDataSourceConfig
@Configuration
@MapperScan(basePackages = {"**.**.******.Dao.********Dao"}, sqlSessionFactoryRef = "primarySqlSessionFactory")
public class PrimaryDataSourceConfig {

    /** Primary DataSource, populated from the spring.datasource.primary.* properties. */
    @ConfigurationProperties(prefix = "spring.datasource.primary")
    @Bean(name = "primaryDataSource")
    public DataSource primaryDataSource(){
        return DruidDataSourceBuilder.create().build();
    }

    /** Transaction manager bound to the primary data source. */
    @Primary
    @Bean(name = "primaryTransactionManager")
    public DataSourceTransactionManager primaryTransactionManager() throws SQLException {
        DataSourceTransactionManager txManager = new DataSourceTransactionManager(primaryDataSource());
        return txManager;
    }

    /** Builds the MyBatis SqlSessionFactory for mappers that target the primary data source. */
    @Primary
    @Bean(name = "primarySqlSessionFactory")
    public SqlSessionFactory backSqlSessionFactory(@Qualifier("primaryDataSource") DataSource primaryDs) throws Exception {
        final SqlSessionFactoryBean factoryBean = new SqlSessionFactoryBean();
        factoryBean.setDataSource(primaryDs);
        factoryBean.setTypeAliasesPackage("**.**.******.Dao.**************Dao");
        return factoryBean.getObject();
    }
}
@Configuration
@MapperScan(basePackages = {"**.**.******.Dao.********Dao"}, sqlSessionFactoryRef = "secondarySqlSessionFactory")
// NOTE(review): class name should be SecondaryDataSourceConfig per Java naming
// convention; kept as-is so existing references do not break.
public class secondaryDataSourceConfig {

    /** Secondary DataSource, populated from the spring.datasource.secondary.* properties. */
    @Bean(name = "secondaryDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.secondary")
    public DataSource secondaryDataSource(){
        return DruidDataSourceBuilder.create().build();
    }

    /**
     * Transaction manager for the secondary data source.
     *
     * <p>FIX: removed {@code @Primary} — the primary config already marks its
     * transaction manager {@code @Primary}; two {@code @Primary} beans of the
     * same type make by-type injection ambiguous. Also renamed the method from
     * the copy-pasted {@code primaryTransactionManager}; the bean name is set
     * explicitly, so wiring is unchanged.
     */
    @Bean(name = "secondaryTransactionManager")
    public DataSourceTransactionManager secondaryTransactionManager() throws SQLException {
        return new DataSourceTransactionManager(secondaryDataSource());
    }

    /** Builds the MyBatis SqlSessionFactory for mappers that target the secondary data source. */
    @Bean(name = "secondarySqlSessionFactory")
    public SqlSessionFactory backSqlSessionFactory(@Qualifier("secondaryDataSource") DataSource backDataSource) throws Exception {
        final SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean();
        sessionFactory.setDataSource(backDataSource);
        // TODO(review): placeholder package — point at the real type-alias package.
        sessionFactory.setTypeAliasesPackage("**.**.******.******************Dao");
        return sessionFactory.getObject();
    }
}
在两个配置类中,将对不同的数据源的Dao层的操作放在不同的package下边,MapperScan指定不同的数据源对应扫描不同的Dao层接口。这样就可以实现同一个工程中对不同的数据源的访问了。当做笔记,之后用到可当做参考。