ThreadLocal + Druid 连接池事务示例
1. pom.xml
<!-- Spring Data JPA: provides repositories and, with ddl-auto, automatic table creation -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<!-- Lombok: generates getters/setters (used by @Data on the entity below) -->
<!-- NOTE(review): 1.16.14 is quite old and fails on newer JDKs - consider upgrading; verify against your JDK -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.16.14</version>
</dependency>
<!-- Druid connection pool: https://mvnrepository.com/artifact/com.alibaba/druid -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>1.0.29</version>
</dependency>
2.application.yml
# DataSource configuration (Druid pool)
spring:
  datasource:
    name: era
    url: jdbc:mysql://localhost:3306/test1?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC
    username: root
    password: 123456
    # Use the Druid pool implementation
    type: com.alibaba.druid.pool.DruidDataSource
    driver-class-name: com.mysql.jdbc.Driver
    # Pool tuning applied to the data source above
    # Initial / minimum / maximum pool size
    initialSize: 5
    minIdle: 5
    maxActive: 20
    # Maximum time (ms) to wait for a connection from the pool
    maxWait: 60000
    # Interval (ms) between idle-connection eviction runs
    timeBetweenEvictionRunsMillis: 60000
    # Minimum time (ms) a connection must stay idle before it may be evicted
    minEvictableIdleTimeMillis: 300000
    validationQuery: SELECT 1 FROM DUAL
    testWhileIdle: true
    testOnBorrow: false
    testOnReturn: false
    # Enable PSCache and set its per-connection size
    poolPreparedStatements: true
    maxPoolPreparedStatementPerConnectionSize: 20
    # Monitoring filters; removing 'stat' breaks SQL stats on the console, 'wall' is the SQL firewall.
    # Fixed: the key was previously written as the full path 'spring.datasource.filters'
    # inside this section, so the property was silently ignored.
    filters: stat,wall,log4j
    # connectProperties enables SQL merging and slow-SQL logging
    connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
    # Merge monitoring data across multiple DruidDataSources
    #useGlobalDataSourceStat: true
  jpa:
    hibernate:
      ddl-auto: update
    show-sql: true
3.DruidDataSourceConfig.java
package com.example.threadlocal;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.support.http.StatViewServlet;
import com.alibaba.druid.support.http.WebStatFilter;
import com.alibaba.druid.support.spring.stat.DruidStatInterceptor;
import org.springframework.aop.framework.autoproxy.BeanNameAutoProxyCreator;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.boot.web.servlet.ServletRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
/**
 * Druid DataSource configuration: builds the pooled {@link DataSource} from
 * {@code spring.datasource.*} properties and registers the Druid monitoring
 * servlet, web-stat filter, and Spring bean-stat interceptor.
 * Created by fee
 */
@Configuration
@EnableTransactionManagement
public class DruidDataSourceConfig {

    @Value("${spring.datasource.name}")
    String applicationName;
    @Value("${spring.datasource.url}")
    String url;
    @Value("${spring.datasource.driver-class-name}")
    String driverClassName;
    @Value("${spring.datasource.username}")
    String username;
    @Value("${spring.datasource.password}")
    String password;
    @Value("${spring.datasource.initialSize}")
    Integer initialSize;
    @Value("${spring.datasource.minIdle}")
    Integer minIdle;
    @Value("${spring.datasource.maxWait}")
    Long maxWait;
    @Value("${spring.datasource.maxActive}")
    Integer maxActive;
    @Value("${spring.datasource.minEvictableIdleTimeMillis}")
    Long minEvictableIdleTimeMillis;

    /**
     * Builds the Druid pool. Fails fast at startup if the filter list is
     * invalid instead of silently running without monitoring.
     *
     * @return the configured {@link DruidDataSource}
     */
    @Bean
    public DataSource dataSource() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(url);
        datasource.setDriverClassName(driverClassName);
        datasource.setUsername(username);
        datasource.setPassword(password);
        // The @Value fields are already boxed; no re-boxing via valueOf needed.
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxWait(maxWait);
        datasource.setMaxActive(maxActive);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        try {
            datasource.setFilters("stat,wall");
        } catch (SQLException e) {
            // A bad filter spec is a configuration error: abort startup rather
            // than continue with a half-configured pool.
            throw new IllegalStateException("Failed to configure Druid filters", e);
        }
        return datasource;
    }

    /**
     * Registers the Druid monitoring console at /druid/*.
     */
    @Bean
    public ServletRegistrationBean druidServlet() {
        ServletRegistrationBean servletRegistrationBean = new ServletRegistrationBean();
        servletRegistrationBean.setServlet(new StatViewServlet());
        servletRegistrationBean.addUrlMappings("/druid/*");
        Map<String, String> initParameters = new HashMap<String, String>();
        // initParameters.put("loginUsername", "druid");// console user name
        // initParameters.put("loginPassword", "druid");// console password
        initParameters.put("resetEnable", "false");// disable the "Reset All" button on the console
        initParameters.put("allow", "127.0.0.1"); // IP allow-list (empty/absent = allow everyone)
        // initParameters.put("deny", "192.168.20.38");// IP deny-list
        // (when both are present, deny wins over allow)
        servletRegistrationBean.setInitParameters(initParameters);
        return servletRegistrationBean;
    }

    /**
     * Registers the web-stat filter that feeds URI statistics to the console;
     * static resources and the console itself are excluded.
     */
    @Bean
    public FilterRegistrationBean filterRegistrationBean() {
        FilterRegistrationBean filterRegistrationBean = new FilterRegistrationBean();
        filterRegistrationBean.setFilter(new WebStatFilter());
        filterRegistrationBean.addUrlPatterns("/*");
        filterRegistrationBean.addInitParameter("exclusions", "*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*");
        return filterRegistrationBean;
    }

    /**
     * Spring-stat interceptor used for per-bean method monitoring; beans are
     * selected by id via {@link #beanNameAutoProxyCreator()}.
     */
    @Bean(value = "druid-stat-interceptor")
    public DruidStatInterceptor druidStatInterceptor() {
        // Renamed from DruidStatInterceptor() to follow lowerCamelCase; the
        // bean name is pinned by @Bean(value=...), so references are unchanged.
        return new DruidStatInterceptor();
    }

    /**
     * Proxies the named beans so the stat interceptor can observe them.
     */
    @Bean
    public BeanNameAutoProxyCreator beanNameAutoProxyCreator() {
        BeanNameAutoProxyCreator beanNameAutoProxyCreator = new BeanNameAutoProxyCreator();
        beanNameAutoProxyCreator.setProxyTargetClass(true);
        // List the bean ids to monitor here:
        //beanNameAutoProxyCreator.setBeanNames("sysRoleMapper","loginController");
        beanNameAutoProxyCreator.setInterceptorNames("druid-stat-interceptor");
        return beanNameAutoProxyCreator;
    }
}
4.SpringUtil.java(获取bean)
package com.example.threadlocal;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
/**
 * Static accessor for the Spring {@link ApplicationContext}, allowing
 * non-managed code (e.g. ConnectionManager's static initializer) to look up
 * beans. The context is captured once, on first injection, and never replaced.
 */
@Component
public class SpringUtil implements ApplicationContextAware {

    private static ApplicationContext context;

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        // Keep only the first context handed to us; ignore later callbacks.
        if (SpringUtil.context == null) {
            SpringUtil.context = applicationContext;
        }
    }

    /** Returns the captured application context (null before startup completes). */
    public static ApplicationContext getApplicationContext() {
        return context;
    }

    /** Looks a bean up by name. */
    public static Object getBean(String name) {
        return getApplicationContext().getBean(name);
    }

    /** Looks a bean up by type. */
    public static <T> T getBean(Class<T> clazz) {
        return getApplicationContext().getBean(clazz);
    }

    /** Looks a bean up by name, checked against the expected type. */
    public static <T> T getBean(String name, Class<T> clazz) {
        return getApplicationContext().getBean(name, clazz);
    }
}
5.ConnectionManager.java
package com.example.threadlocal;
import com.alibaba.druid.pool.DruidDataSource;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
/**
 * Thread-bound JDBC connection manager backed by the Druid pool.
 *
 * <p>Each thread gets its own {@link Connection} (autoCommit off) via a
 * {@link ThreadLocal}; {@link #commit()} and {@link #rollback()} operate on
 * the calling thread's connection only.
 */
public class ConnectionManager {

    static DataSource dataSource;
    private static final ThreadLocal<Connection> connThreadlocal = new ThreadLocal<Connection>();

    static {
        dataSource = SpringUtil.getBean(DruidDataSource.class);
    }

    /**
     * Returns the calling thread's connection, checking one out of the pool
     * (with autoCommit disabled) on first use.
     *
     * @return the thread-bound connection
     * @throws IllegalStateException if a connection cannot be obtained
     */
    public static Connection getConnection() {
        Connection conn = connThreadlocal.get();
        if (conn == null) {
            try {
                // Use a local, not a shared static field: a static Connection
                // would be clobbered by concurrent threads and defeat the
                // ThreadLocal entirely.
                conn = dataSource.getConnection();
                conn.setAutoCommit(false);
                connThreadlocal.set(conn);
            } catch (SQLException e) {
                throw new IllegalStateException("Could not obtain a pooled connection", e);
            }
        }
        return conn;
    }

    /**
     * Returns the calling thread's connection to the pool and clears the
     * thread-local binding. Safe to call when no connection is bound.
     */
    public static void close() {
        Connection conn = connThreadlocal.get();
        if (conn == null) {
            return;
        }
        try {
            conn.setAutoCommit(true);
            // Close only the thread's own connection. The old code also called
            // dataSource.getConnection().close(), which checked a *new*
            // connection out of the pool just to close it.
            conn.close();
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            // remove(), not set(null): avoids ThreadLocal leaks on pooled threads.
            connThreadlocal.remove();
        }
    }

    /**
     * Commits the calling thread's transaction, rolling back on failure, and
     * always releases the connection afterwards.
     */
    public static void commit() {
        Connection conn = connThreadlocal.get();
        if (conn == null) {
            return;
        }
        try {
            conn.commit();
        } catch (SQLException e) {
            rollback();
        } finally {
            close();
        }
    }

    /**
     * Rolls back the calling thread's transaction. The connection stays bound;
     * callers release it via {@link #close()} (commit() does so automatically).
     */
    public static void rollback() {
        Connection conn = connThreadlocal.get();
        if (conn == null) {
            return;
        }
        try {
            conn.rollback();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
6.User.java
package com.example.threadlocal;
import lombok.Data;
import javax.persistence.*;
/**
 * User entity; the table is created by JPA (ddl-auto: update).
 * Getters/setters are generated by Lombok's @Data.
 */
@Data
@Entity
// Lower-case "user" to match the raw SQL in UserDao ("insert into user...");
// the previous name "USER" breaks on case-sensitive MySQL installations.
@Table(name = "user")
public class User {
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Integer id;
    @Column(length = 20)
    private String name;
}
7.UserDao.java
package com.example.threadlocal;
import org.springframework.stereotype.Repository;
import java.sql.Connection;
import java.sql.PreparedStatement;
/**
 * Raw-JDBC DAO for the user table. Connections come from
 * {@link ConnectionManager} and stay open (autoCommit off) so the service
 * layer can commit/rollback the whole unit of work.
 */
@Repository
public class UserDao {

    /** Default demo name used by the no-arg convenience methods. */
    private static final String DEFAULT_NAME = "小白";

    /**
     * Inserts the default demo user.
     *
     * @return number of rows inserted (0 on failure)
     */
    public int add() {
        return add(DEFAULT_NAME);
    }

    /**
     * Inserts a user with the given name. Generalized from the hard-coded
     * original; the no-arg overload preserves the old behavior.
     *
     * @param name the name to insert
     * @return number of rows inserted (0 on failure)
     */
    public int add(String name) {
        int result = 0;
        try {
            Connection conn = ConnectionManager.getConnection();
            // try-with-resources closes the statement even on failure;
            // the connection itself is managed by ConnectionManager.
            try (PreparedStatement ps =
                         conn.prepareStatement("insert into user(name) values (?)")) {
                ps.setString(1, name);
                result = ps.executeUpdate();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return result;
    }

    /**
     * Deletes the default demo user.
     *
     * @return number of rows deleted (0 on failure)
     */
    public int delete() {
        return delete(DEFAULT_NAME);
    }

    /**
     * Deletes users with the given name.
     *
     * @param name the name to delete
     * @return number of rows deleted (0 on failure)
     */
    public int delete(String name) {
        int result = 0;
        try {
            Connection conn = ConnectionManager.getConnection();
            try (PreparedStatement ps =
                         conn.prepareStatement("delete from user where name=?")) {
                ps.setString(1, name);
                result = ps.executeUpdate();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return result;
    }
}
8.UserService.java
package com.example.threadlocal;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
 * Demo service: runs an insert and a delete in one manual transaction.
 * Throw inside this method (before commit) to observe rollback behavior.
 */
@Service
public class UserService {

    @Autowired
    UserDao userDao;

    public void test() {
        try {
            userDao.add();
            userDao.delete();
            ConnectionManager.commit();
        } catch (RuntimeException e) {
            // The original had no rollback path: any exception before commit()
            // left the thread-bound connection open with autoCommit=false.
            ConnectionManager.rollback();
            ConnectionManager.close();
            throw e;
        }
    }
}
9.UserController.java
package com.example.threadlocal;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * Demo endpoint: GET /test drives UserService's manual transaction and
 * reports "success" once it returns.
 */
@RestController
public class UserController {

    @Autowired
    UserService userService;

    @GetMapping("test")
    public String test() {
        this.userService.test();
        return "success";
    }
}
10.访问 http://localhost:8080/test,可以在service层中人为制造异常,观察数据库的插入情况。