[记录druid整合springboot+logback配置打印sql日志]
整合记录
首先看 druid 的LogFilter 为我们准备的四种logger类型
这些logger分别对应打印 datasource相关、connection相关、statement相关、resultSet相关的日志
你需要在你的logback.xml中加入一个logger,这里我只加了statement相关的logger 为了打印可执行的sql
你可以全搞上 比如这样
再看 LogFilter 打印日志主要使用以下方法
在LogFilter实现类(也就是Slf4jLogFilter、Log4jFilter、Log4j2Filter、CommonsLogFilter)中 实现了这些抽象方法
好了,看到了:它使用 debug 和 error 级别来打印日志。在你使用的日志组件中,把对应 logger 的打印级别调成 DEBUG 或更低(例如 TRACE)就可以了
这里 我单独用了一个logger 原因如下
因为 springboot 启动时如果将 root 的 level 调成 DEBUG,会打印很多我不想要的日志;为了去掉这些日志我把 root 的级别调高了,但这样一来级别过高,druid 的 DEBUG 日志就打不出来。因此单独配置一个 logger 来记录,效果如下
完整的logback.xml如下
<?xml version="1.0" encoding="UTF-8"?>
<!-- Logback configuration: console + daily-rolling file output.
     The root logger stays at INFO, while dedicated druid.sql.* loggers are
     set to DEBUG so that Druid's Slf4jLogFilter output is emitted without
     flooding the log with DEBUG noise from the rest of the application. -->
<configuration debug="true">
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<!-- encoders are assigned the type
ch.qos.logback.classic.encoder.PatternLayoutEncoder by default -->
<encoder>
<pattern>[%d{HH:mm:ss}] %-4relative [%thread] %-5level %logger{50} - %msg %n</pattern>
</encoder>
</appender>
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- Support multiple-JVM writing to the same log file -->
<prudent>true</prudent>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- One file per day; keep 60 days, capped at 20GB total -->
<fileNamePattern>/storagelogs1111/storage_log_%d{yyyy-MM-dd}.log</fileNamePattern>
<maxHistory>60</maxHistory>
<totalSizeCap>20GB</totalSizeCap>
</rollingPolicy>
<encoder>
<pattern>[%d{HH:mm:ss}] %-4relative [%thread] %-5level %logger{50} - %msg %n</pattern>
</encoder>
</appender>
<!-- Silence noisy package: only errors from com.helper.util -->
<logger name="com.helper.util" level="ERROR">
</logger>
<!-- The four logger names below are the ones Druid's LogFilter logs to
     (statement / datasource / connection / resultSet). They deliberately
     carry no appender-ref: additivity defaults to true, so their events
     propagate to root and are written by STDOUT and FILE. Only the level
     is overridden to DEBUG here so the SQL logging actually shows up. -->
<logger name="druid.sql.Statement" level="debug">
</logger>
<logger name="druid.sql.DataSource" level="debug">
</logger>
<logger name="druid.sql.Connection" level="debug">
</logger>
<logger name="druid.sql.ResultSet" level="debug">
</logger>
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
</root>
</configuration>
druid 的配置(使用java 注解形式 也可以使用xml形式)
package com.helper.config;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;
import javax.sql.DataSource;
import com.alibaba.druid.filter.Filter;
import com.alibaba.druid.filter.logging.Slf4jLogFilter;
import com.alibaba.druid.pool.DruidDataSource;
import com.helper.util.Tools;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
@Configuration
@PropertySource(value = { "classpath:testdb.properties" })
@ComponentScan("com.helper.*")
public class DBconfig {

    private static final Logger LOG = LoggerFactory.getLogger(DBconfig.class);

    @Value("${test.val1}")
    private String val1;
    @Value("${test.val2}")
    private String val2;
    @Value("${test.val3}")
    private String val3;
    @Value("${druid.jdbc.url}")
    private String url;
    @Value("${druid.jdbc.user}")
    private String user;
    @Value("${druid.jdbc.password}")
    private String password;
    // Injected for compatibility with testdb.properties but intentionally NOT
    // applied via dataSource.setFilters(...): the Slf4jLogFilter below is
    // registered through setProxyFilters(...), and configuring both would make
    // every statement get logged twice.
    @Value("${druid.jdbc.filters}")
    private String filters;

    /**
     * Creates the Druid connection pool.
     *
     * <p>{@code init}/{@code close} are wired as the bean's lifecycle callbacks so
     * Spring starts the pool on context startup and shuts it down on close. A
     * {@link Slf4jLogFilter} is attached so executed statements are logged to the
     * {@code druid.sql.*} loggers configured in logback.xml.
     *
     * @return the configured {@link DruidDataSource}
     */
    @Bean(initMethod = "init", destroyMethod = "close")
    public DataSource getDataSource() {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setUrl(url);
        dataSource.setUsername(user);
        dataSource.setPassword(password);
        dataSource.setMaxActive(20);
        dataSource.setInitialSize(1);
        dataSource.setMaxWait(60000); // ms to wait for a free connection before failing
        dataSource.setMinIdle(1);
        dataSource.setTimeBetweenEvictionRunsMillis(60000);
        dataSource.setMinEvictableIdleTimeMillis(300000);
        dataSource.setTestWhileIdle(true);
        dataSource.setTestOnBorrow(false);
        dataSource.setTestOnReturn(false);
        dataSource.setPoolPreparedStatements(true);
        dataSource.setMaxOpenPreparedStatements(20);
        dataSource.setAsyncInit(true);

        // Executable-SQL logging substitutes the bound parameters into the
        // statement text so the logged SQL can be copy-pasted and run directly.
        Slf4jLogFilter logFilter = new Slf4jLogFilter();
        logFilter.setStatementExecutableSqlLogEnable(true);
        logFilter.setStatementLogEnabled(true);
        List<Filter> filterList = Arrays.asList(logFilter);
        dataSource.setProxyFilters(filterList);
        return dataSource;
    }

    /** JdbcTemplate backed by the pooled data source above. */
    @Bean
    public JdbcTemplate getJdbcTemplate() {
        // Inside a @Configuration class this call is intercepted by Spring's
        // CGLIB proxy and returns the singleton DataSource bean, not a new pool.
        return new JdbcTemplate(getDataSource());
    }

    /** NamedParameterJdbcTemplate backed by the same pooled data source. */
    @Bean
    public NamedParameterJdbcTemplate getNamedJdbcTemplate() {
        return new NamedParameterJdbcTemplate(getDataSource());
    }

    /** Sanity-check bean: logs the injected test properties once at startup. */
    @Bean
    public Tools testObject() {
        LOG.info(" val1: {} ,val2 : {} ,val3 : {} ", val1, val2, val3);
        return new Tools();
    }
}
有什么不对的地方请大家指正,谢谢
附上druid相关链接
常见问题
https://github.com/alibaba/druid/wiki/%E5%B8%B8%E8%A7%81%E9%97%AE%E9%A2%98
数据源默认配置
https://github.com/alibaba/druid/wiki/DruidDataSource%E9%85%8D%E7%BD%AE
配置_LogFilter
https://github.com/alibaba/druid/wiki/%E9%85%8D%E7%BD%AE_LogFilter
logback logger配置详解
http://logback.qos.ch/manual/configuration.html