阿里druid配置及实现SQL日志输出(mybatis3.2.8+springmvc4.2.3.RELEASE+mysql5.6)

一、maven工程引入druid包

1.在pom.xml中增加:

<!-- Alibaba Druid connection pool: provides pooling plus SQL monitoring,
     logging and firewall filters used below.
     NOTE(review): 1.0.18 is an old release — consider upgrading. -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>druid</artifactId>
    <version>1.0.18</version>
</dependency>

2.在spring-mybatis.xml中增加druid数据库配置及日志配置:

<!-- StatFilter: collects SQL execution statistics. Statements slower than
     slowSqlMillis (30s) are logged as slow SQL; mergeSql merges
     parameterized variants of the same statement for aggregation. -->
<bean id="stat-filter" class="com.alibaba.druid.filter.stat.StatFilter">
    <property name="slowSqlMillis" value="30000" />
    <property name="logSlowSql" value="true" />
    <property name="mergeSql" value="true" />
    <property name="dbType" value="mysql" />
</bean>
<!-- Log4jFilter: emits the executable SQL (parameters substituted) through
     log4j — picked up by the "druid.sql.Statement" logger in log4j.xml. -->
<bean id="log-filter" class="com.alibaba.druid.filter.logging.Log4jFilter">
    <property name="statementExecutableSqlLogEnable" value="true" />
</bean>
<!-- WallFilter: Druid's SQL firewall, configured by wall-config below. -->
<bean id="wall-filter" class="com.alibaba.druid.wall.WallFilter">
    <property name="config" ref="wall-config" />
</bean>

<!-- Allow multiple SQL statements in one execution (e.g. batch scripts);
     the wall filter would otherwise reject them. -->
<bean id="wall-config" class="com.alibaba.druid.wall.WallConfig">
    <property name="multiStatementAllow" value="true" />
</bean>

<!-- Druid pooled DataSource. init/destroy methods manage the pool lifecycle. -->
<bean name="dataSource" class="com.alibaba.druid.pool.DruidDataSource" init-method="init" destroy-method="close">
    <property name="url" value="${jdbc.url}" />
    <property name="username" value="${jdbc.username}" />
    <property name="password" value="${jdbc.password}" />

    <!-- Pool sizing: initial / minimum idle / maximum active connections -->
    <property name="initialSize" value="${druid.initialSize}" />
    <property name="minIdle" value="${druid.minIdle}" />
    <property name="maxActive" value="${druid.maxActive}" />

    <!-- Maximum time (ms) to wait when acquiring a connection -->
    <property name="maxWait" value="${druid.maxWait}" />
    <!-- Interval (ms) between eviction runs that close idle connections -->
    <property name="timeBetweenEvictionRunsMillis" value="${druid.timeBetweenEvictionRunsMillis}" />

    <!-- Minimum time (ms) a connection must sit idle before it may be evicted -->
    <property name="minEvictableIdleTimeMillis" value="${druid.minEvictableIdleTimeMillis}" />

    <!-- Connection validation settings (query + when to test) -->
    <property name="validationQuery" value="${druid.validationQuery}" />
    <property name="testWhileIdle" value="${druid.testWhileIdle}" />
    <property name="testOnBorrow" value="${druid.testOnBorrow}" />
    <property name="testOnReturn" value="${druid.testOnReturn}" />

    <!-- PreparedStatement cache (PSCache) and its per-connection size.
         Set poolPreparedStatements=true for Oracle; MySQL can use false. -->
    <property name="poolPreparedStatements" value="${druid.poolPreparedStatements}" />
    <property name="maxPoolPreparedStatementPerConnectionSize"   value="${druid.maxPoolPreparedStatementPerConnectionSize}" />

    <property name="removeAbandoned" value="${druid.removeAbandoned}" /> <!-- enable reclaiming of leaked (abandoned) connections -->
    <property name="removeAbandonedTimeout" value="${druid.removeAbandonedTimeout}" /> <!-- e.g. 1800 seconds = 30 minutes -->
    <property name="logAbandoned" value="${druid.logAbandoned}" /> <!-- log an error when an abandoned connection is closed -->

    <!-- Monitoring/statistics filters enabled by alias name -->
    <property name="filters" value="${druid.filters}" />
    <!-- Filter bean instances, applied in order: stat, wall, log -->
    <property name="proxyFilters">
        <list>
            <ref bean="stat-filter" />
            <ref bean="wall-filter" />
            <ref bean="log-filter" />
        </list>
    </property>
    <!-- <property name="connectionProperties" value="${druid.connectionProperties}" /> -->
    <property name="useGlobalDataSourceStat" value="${druid.useGlobalDataSourceStat}" />
</bean>

3、在web.xml中将原有的log4j配置注释掉,并增加urlFilter过滤器配置:

<!-- The Spring Log4jConfigListener bootstrap is deliberately commented out;
     log4j.xml is found on the classpath by log4j's default initialization. -->
<!--<context-param>
    <param-name>log4jConfigLocation</param-name>
    <param-value>classpath:log4j.xml</param-value>
</context-param>
<listener>
    <listener-class>org.springframework.web.util.Log4jConfigListener</listener-class>
</listener>-->
<!-- Request-logging filter (class shown in section 5), applied to all URLs. -->
<filter>
    <filter-name>urlFilter</filter-name>
    <filter-class>com.zdnst.core.filter.UrlFilter</filter-class>
</filter>
<filter-mapping>
    <filter-name>urlFilter</filter-name>
    <url-pattern>/*</url-pattern>
</filter-mapping>

 

4、log4j.xml中配置

<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE log4j:configuration PUBLIC "-//Apache//DTD Log4j 1.2//EN"
                                    "http://logging.apache.org/log4j/docs/api/org/apache/log4j/xml/log4j.dtd">
<!-- FIX: declare the log4j namespace prefix. The original file used
     <log4j:configuration> without xmlns:log4j, which namespace-aware XML
     parsers reject as an undeclared prefix. -->
<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
   <!-- Console output. StringMatchFilter with AcceptOnMatch=false DENIES any
        event whose message contains StringToMatch (and is NEUTRAL otherwise),
        so the parameterized statement noise ("?" placeholders, "Parameters",
        "Types" dumps) is suppressed and only executable SQL gets through. -->
   <appender class="org.apache.log4j.ConsoleAppender" name="RootConsoleAppender">
      <param name="Threshold" value="debug" />
      <layout class="org.apache.log4j.PatternLayout">
         <param name="ConversionPattern" value="%d{ABSOLUTE} %5p [%t] %40.40c:%4L - %m%n" />
      </layout>
      <filter class="org.apache.log4j.varia.StringMatchFilter">
         <param name="StringToMatch" value="?" />
         <param name="AcceptOnMatch" value="false" />
      </filter>
      <filter class="org.apache.log4j.varia.StringMatchFilter">
         <param name="StringToMatch" value="Parameters" />
         <param name="AcceptOnMatch" value="false" />
      </filter>
      <filter class="org.apache.log4j.varia.StringMatchFilter">
         <param name="StringToMatch" value="Types" />
         <param name="AcceptOnMatch" value="false" />
      </filter>
   </appender>

   <!-- Rolling SQL log file: up to 10 backups of 10MB each under Tomcat's
        logs directory. SELECT statements are also denied here, so the file
        effectively records non-query (write) SQL only. -->
   <appender class="org.apache.log4j.RollingFileAppender" name="SQLFileAppender">
      <param name="Threshold" value="debug" />
      <param name="File" value="${catalina.base}/logs/jsf-sql.log" />
      <param name="MaxFileSize" value="10MB" />
      <param name="MaxBackupIndex" value="10" />
      <layout class="org.apache.log4j.PatternLayout">
         <param name="ConversionPattern" value="%d{ABSOLUTE} %5p [%t] %40.40c:%4L - %m%n" />
      </layout>
      <filter class="org.apache.log4j.varia.StringMatchFilter">
         <param name="StringToMatch" value="?" />
         <param name="AcceptOnMatch" value="false" />
      </filter>

      <filter class="org.apache.log4j.varia.StringMatchFilter">
         <param name="StringToMatch" value="SELECT" />
         <param name="AcceptOnMatch" value="false" />
      </filter>
      <filter class="org.apache.log4j.varia.StringMatchFilter">
         <param name="StringToMatch" value="select" />
         <param name="AcceptOnMatch" value="false" />
      </filter>

      <filter class="org.apache.log4j.varia.StringMatchFilter">
         <param name="StringToMatch" value="Parameters" />
         <param name="AcceptOnMatch" value="false" />
      </filter>
      <filter class="org.apache.log4j.varia.StringMatchFilter">
         <param name="StringToMatch" value="Types" />
         <param name="AcceptOnMatch" value="false" />
      </filter>

   </appender>

   <!-- Druid's executable-SQL logger (fed by Log4jFilter). additivity=false
        keeps these events out of the root logger, so SQL goes only to the
        two appenders referenced here. -->
   <logger name="druid.sql.Statement" additivity="false">
      <level value="debug"/>
      <appender-ref ref="SQLFileAppender"/>
      <appender-ref ref="RootConsoleAppender"/>
   </logger>

   <!-- Application code at debug; noisy libraries raised to info/error. -->
   <logger name="com.zdnst" >
      <level value="debug" />
   </logger>
   <logger name="org.apache.commons.beanutils.converters">
      <level value="info"/>
   </logger>

   <logger name="org.springframework.data">
      <level value="debug" />
   </logger>

   <logger name="org.springframework.web">
      <level value="info" />
   </logger>

   <!-- Suppress Druid's per-row ResultSet logging -->
   <logger name="druid.sql.ResultSet">
      <level value="ERROR" />
   </logger>

   <logger name="org.dozer">
      <level value="ERROR" />
   </logger>

   <logger name="org.apache.cxf">
      <level value="info" />
   </logger>

   <logger name="org.apache.kafka.clients.consumer.internals">
      <level value="info"/>
   </logger>

   <logger name="org.springframework.kafka.listener">
      <level value="info"/>
   </logger>

   <!-- Everything else: debug to the console -->
   <root>
      <level value="debug" />
      <appender-ref ref="RootConsoleAppender"/>
   </root>

</log4j:configuration>

5、UrlFilter增加

package com.zdnst.core.filter;

import com.github.sd4324530.fastweixin.util.JSONUtil;
import jodd.io.StreamUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonReader;
import javax.servlet.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

/**
 * Created by yi.lu on 2017/3/14.
 *
 * Servlet filter that logs the request URL, flattened parameters and elapsed
 * time for incoming {@code application/json} requests. Because the JSON body
 * can be consumed only once from the original request, it is buffered into a
 * {@link HttpServletRequestWrapper2} so downstream code (e.g. Spring MVC)
 * can still read it. Non-JSON requests pass through untouched.
 */
public class UrlFilter implements Filter {
    private static final Logger logger = LoggerFactory.getLogger(UrlFilter.class);

    public void init(FilterConfig filterConfig) throws ServletException {
        // no initialization required
    }

    public void destroy() {
        // no resources to release
    }

    /**
     * Logs JSON requests (URL, parameters, elapsed millis) and forwards all
     * other requests unchanged.
     */
    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain)
            throws IOException, ServletException {
        // FIX: track whether the chain has already run. The original catch
        // block unconditionally re-invoked filterChain.doFilter, so an
        // exception thrown AFTER the chain had executed caused the request
        // to be processed twice.
        boolean chainInvoked = false;
        try {
            HttpServletRequest localRequest = (HttpServletRequest) servletRequest;
            HttpServletResponse localResponse = (HttpServletResponse) servletResponse;

            String contentType = localRequest.getContentType();
            if (contentType != null && contentType.indexOf("application/json") != -1) {
                String currentURL = localRequest.getRequestURI();

                // Wrap the request so its body can be re-read downstream.
                HttpServletRequestWrapper2 newReq = new HttpServletRequestWrapper2(localRequest);
                Map<String, Object> value = convertParams(newReq);

                StringBuilder logSb = new StringBuilder("-------------------访问请求")
                        .append(localRequest.getMethod()).append(":")
                        .append(currentURL).append("&");
                if (value != null && !value.isEmpty()) {
                    int i = 0;
                    for (Map.Entry<String, Object> entry : value.entrySet()) {
                        if (i++ > 0) {
                            logSb.append("&");
                        }
                        logSb.append(entry.getKey()).append("=").append(entry.getValue());
                    }
                }

                long startTime = System.currentTimeMillis();

                // Re-serialize the parsed parameters and install them as the
                // wrapper's body before continuing down the filter chain.
                JsonReader jsonReader = Json.createReader(new StringReader(JSONUtil.toJson(value)));
                JsonObject json = jsonReader.readObject();
                ByteArrayOutputStream out = new ByteArrayOutputStream();
                Json.createWriter(out).writeObject(json);
                newReq.setBody(out.toByteArray());

                chainInvoked = true;
                filterChain.doFilter(newReq, localResponse);

                long endTime = System.currentTimeMillis();
                logger.info(logSb.toString());
                logger.info("-------------------访问总共用时:" + (endTime - startTime) + "毫秒");
            } else {
                // Not a JSON request: pass straight through.
                chainInvoked = true;
                filterChain.doFilter(servletRequest, servletResponse);
            }
        } catch (Exception e) {
            // FIX: preserve the cause (the original logged only a fixed
            // message, discarding the stack trace).
            logger.error("过滤器运行错误", e);
            if (!chainInvoked) {
                // Best-effort fallback: only invoke the chain if it was never
                // reached, otherwise the request would be handled twice.
                filterChain.doFilter(servletRequest, servletResponse);
            }
        }
    }

    /**
     * Extracts request parameters (GET/POST). When the standard parameter map
     * is non-empty it is flattened to the first value per key; otherwise the
     * buffered JSON body is parsed into a map.
     *
     * @param request wrapped request whose body has been buffered
     * @return parameter map; never null (empty map when the body is unparseable)
     */
    private Map<String, Object> convertParams(HttpServletRequestWrapper2 request) {
        Map<String, String[]> params = request.getParameterMap();
        Map<String, Object> param = new HashMap<String, Object>();
        if (params.size() == 0) {
            // FIX: decode explicitly as UTF-8 — the body bytes were encoded
            // as UTF-8 by the wrapper's constructor; the original used the
            // platform default charset here.
            String line = new String(request.getBody(), StandardCharsets.UTF_8);
            Map<String, Object> result = JSONUtil.toMap(line);
            if (result != null) {
                // FIX: the original assigned result unconditionally and could
                // return null, leaking an NPE risk to callers.
                param = result;
            }
        } else {
            for (Map.Entry<String, String[]> entry : params.entrySet()) {
                param.put(entry.getKey(), entry.getValue()[0]); // first value only
            }
        }
        return param;
    }

    /**
     * Request wrapper that buffers the body so it can be read multiple times.
     * Declared static: it never references the enclosing filter instance.
     */
    private static class HttpServletRequestWrapper2 extends HttpServletRequestWrapper {

        // Buffered request body, UTF-8 encoded by the constructor.
        private byte[] body;

        public byte[] getBody() {
            return body;
        }

        public void setBody(byte[] body) {
            this.body = body;
        }

        /**
         * Wraps the given request and eagerly buffers its body.
         *
         * @param request original request
         * @throws IOException if reading the body fails
         * @throws IllegalArgumentException if the request is null
         */
        public HttpServletRequestWrapper2(HttpServletRequest request) throws IOException {
            super(request);
            // Consume the underlying reader once; later reads go through
            // getInputStream()/getReader() backed by this buffer.
            body = StreamUtil.readBytes(request.getReader(), "UTF-8");
        }

        @Override
        public BufferedReader getReader() throws IOException {
            // FIX: decode with UTF-8 to match the encoding used when the
            // body was buffered (original used the platform default).
            return new BufferedReader(new InputStreamReader(getInputStream(), StandardCharsets.UTF_8));
        }

        @Override
        public ServletInputStream getInputStream() throws IOException {
            final ByteArrayInputStream bais = new ByteArrayInputStream(body);
            // NOTE(review): on a Servlet 3.1+ container, ServletInputStream
            // also requires isFinished/isReady/setReadListener — confirm the
            // target servlet API version before adding them.
            return new ServletInputStream() {

                @Override
                public int read() throws IOException {
                    return bais.read();
                }
            };
        }

    }

}

 

转载于:https://my.oschina.net/u/2322635/blog/844990

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值