Parsing the sqlList from Druid-generated logs

This post describes a way to audit a project team's SQL for performance and coding conventions: Druid's web monitoring statistics are dumped to log files, and Java code parses the JSON and analyzes the SQL. The main features are extracting the SQL list from the logs, de-duplicating it, sorting by execution time, filtering out SELECT statements without a WHERE clause, and exporting the results to Excel or TXT files. The code is included below.

Recently I needed to collect our project team's SQL and analyze its performance and adherence to our SQL conventions, so I wrote a small utility. I'm recording it here and will keep improving it later.

In Druid's web monitoring console, click the option that records the statistics to the log.

I won't include screenshots of the console steps here.

Locate the sqlList entries in the log files

From the .log files, use sed to cut out the JSON content that follows sqlList and write it to an xx.txt file:

find ./* -name '*.log'|xargs grep 'sqlList'|sed -r 's/.*sqlList\"\:(.*)\}\r\n/\1/g' >xx.txt
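For reference, each line written to xx.txt should be a JSON array of per-SQL stat objects. The field names shown here are assumed to match what fastjson maps onto the DruidLogBean below (it matches property names case-insensitively); the values are made up for illustration:

[{"SQL":"SELECT * FROM T_USER","ExecuteCount":12,"ExecuteMillisTotal":340},{"SQL":"UPDATE T_USER SET NAME = ? WHERE ID = ?","ExecuteCount":3,"ExecuteMillisTotal":27}]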

Parse with Java code

Parse the JSON files and write out several result files as needed.
The input path handling here isn't very flexible: under the INPUT_DIR directory there is one more level of subdirectories (e.g. one per module), and inside those are the xx.txt files.
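A sketch of the assumed layout (moduleA and moduleB are placeholder module names):

C:\xx\TEMP\xx\sqlFile
├── moduleA
│   └── xx.txt
└── moduleB
    └── xx.txt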

import com.alibaba.fastjson.JSON;
import com.demo.util.ExportExcel;
import org.thymeleaf.util.StringUtils;

import java.io.*;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.*;
import java.util.stream.Collectors;

public class ParseDruidJson {

    /**
     * Raw parse results, keyed by input file
     */
    private static Map<File, List<DruidLogBean>> beansMap = new HashMap<>();
    /**
     * De-duplicated SQL statements, keyed by input file
     */
    private static Map<File, Set<String>> distinctSqlMap = new HashMap<>();

    /**
     * Directory of input files; output files reuse the input file name with a suffix appended
     */
    private static final String INPUT_DIR = "C:\\xx\\TEMP\\xx\\sqlFile";

    /**
     * Separator between fields in one output line
     */
    private static final String SPLIT_STR = "#";


    public static void main(String[] args) throws Exception {
        File inputDirFile = new File(INPUT_DIR);
        if(inputDirFile.isFile()){
            return;
        }
        // Walk every file under the input directory and parse it
        for(File inputFileSec  : inputDirFile.listFiles()){
            if(inputFileSec.isFile()){
                continue;
            }
            for(File inputFile: inputFileSec.listFiles()){
                // Skip files whose names contain "_" (those are generated output files; original input files must not contain "_")
                if(inputFile.getName().contains("_")){
                    continue;
                }

                BufferedReader reader = new BufferedReader(new FileReader(inputFile));
                String sqlListLine = reader.readLine();
                List<DruidLogBean> beans = new ArrayList<>();
                Set<String> distinctSql = new HashSet<>();
                beansMap.put(inputFile, beans);
                distinctSqlMap.put(inputFile, distinctSql);

                while(!StringUtils.isEmpty(sqlListLine)){
                    beans.addAll(JSON.parseArray(sqlListLine, DruidLogBean.class));
                    sqlListLine = reader.readLine();
                }
                reader.close();

                // Walk each SQL entry and collect the de-duplicated SQL
                for (DruidLogBean bean : beans) {
                    // SQL containing hard-coded literal values is not normalized yet; de-duplicated SQL is upper-cased
                    distinctSql.add(bean.getSql().toUpperCase(Locale.ROOT));
                }

                System.out.println(inputFile.getAbsolutePath() + " contains " + beans.size() + " SQL statements");
            }
        }

        printAllSql();
        printSqlDistinct();
        printSqlSortByAvgTimeDesc();
        printSqlWithoutWhere();
    }


    /**
     * Get the output file.
     * The output file name is the input file name with _$outSubfix appended; if it already exists it is deleted and recreated.
     * @param inputFile
     * @param outSubfix
     * @param expandedName file extension, e.g. ".xls"
     * @return
     * @throws IOException
     */
    private static File getOutputFile(File inputFile, String outSubfix, String expandedName) throws IOException {
        String fileName = inputFile.getAbsolutePath();
        int pointIndex = fileName.lastIndexOf(".");

        String outputFilename = null;
        if(StringUtils.isEmpty(expandedName)){

            outputFilename = fileName.substring(0, pointIndex) + "_" + outSubfix + fileName.substring(pointIndex);
        }else{
            outputFilename = fileName.substring(0, pointIndex) + "_" + outSubfix + expandedName;
        }
        File outputFile = new File(outputFilename);
        if(outputFile.exists()){
            outputFile.delete();
            outputFile.createNewFile();
        }

        return outputFile;
    }

    /**
     * Print all SQL statements.
     * Line format: sql#ExecuteMillisTotal#ExecuteCount#avgExecuteMillis
     * @throws IOException
     */
    private static void printAllSql() throws Exception {
        System.out.println("---------printAllSql------------");

        for(Map.Entry<File, List<DruidLogBean>> entity : beansMap.entrySet()){

            Map<String, List<String>> classifyMap = new HashMap<>();

            File outputFile = getOutputFile(entity.getKey(), "all", ".xls");

            for(DruidLogBean bean : entity.getValue()){
                String printSql = bean.getSql() + SPLIT_STR + bean.getExecuteMillisTotal() + SPLIT_STR + bean.getExecuteCount() + SPLIT_STR + getExecuteAVGTime(bean.getExecuteMillisTotal(), bean.getExecuteCount());
                classfiyByDMLType(classifyMap, printSql);
            }

            List<String> printSql = new ArrayList<>();
            for(Map.Entry<String, List<String>> entrySql: classifyMap.entrySet()){
                for(String sql : entrySql.getValue()){
                    printSql.add(sql);
                }
            }
            String[] rowName = new String[]{"SQL", "ExecuteMillisTotal(ms)", "ExecuteCount", "ExecuteAVGTime(ms)"};
            printToExcel(outputFile.getAbsolutePath(), null, rowName, printSql);
//            printToTxt(outputFile.getAbsolutePath(), printSql);
            System.out.println("输出文件:" + outputFile.getAbsolutePath());
        }
    }


    /**
     * Write the result to an Excel file
     * @param fileName
     * @param tableName
     * @param rowName
     * @param sqlList
     * @throws Exception
     */
    private static void printToExcel(String fileName, String tableName, String[] rowName,List<String> sqlList) throws Exception {
        if(fileName.endsWith(".txt")){
            fileName = fileName.replace(".txt", ".xls");
        }
        List<Object[]> rows = new ArrayList<>();
        for(String sql : sqlList){
            rows.add(sql.split(SPLIT_STR));
        }

        if(StringUtils.isEmpty(tableName)){
            tableName = "result";
        }

        FileOutputStream exportOutputStream = new FileOutputStream(fileName);
        if(rows != null && rows.size() > 0){
            ExportExcel exportExcel = new ExportExcel(tableName, rowName, rows);
            exportExcel.export(exportOutputStream);
        }
        exportOutputStream.flush();
        exportOutputStream.close();
    }


    /**
     * Write the result to a txt file
     * @param fileName
     * @param sqlList
     * @throws IOException
     */
    private static void printToTxt(String fileName, List<String> sqlList) throws IOException {
        BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName,true)));
        for(String sql : sqlList){
            out.write(sql);
            out.newLine();
        }
        out.flush();
        out.close();
    }

    /**
     * Group SQL statements by their DML type (the first keyword of the statement)
     * @param classfiedMap
     * @param sql
     */
    private static void classfiyByDMLType(Map<String, List<String>> classfiedMap, String sql){
        String dMLType = sql.substring(0, sql.indexOf(" ")).toLowerCase(Locale.ROOT);
        List<String> dMLList = classfiedMap.get(dMLType);
        if(dMLList == null){
            dMLList = new ArrayList<>();
            classfiedMap.put(dMLType, dMLList);
        }

        dMLList.add(sql);
    }


    /**
     * Print SQL sorted by average execution time, descending
     */
    private static void printSqlSortByAvgTimeDesc() throws IOException{
        System.out.println("---------printSqlSortByAvgTimeDesc------------");
        for(Map.Entry<File, List<DruidLogBean>> entity : beansMap.entrySet()){
            File outputFile = getOutputFile(entity.getKey(), "sort", null);
            BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outputFile,true)));

            List<Map<String, String>> sortList = new ArrayList<>();
            for(DruidLogBean bean : entity.getValue()){
                Map<String, String> beanMap = new HashMap<>();
                beanMap.put("avgTime", getExecuteAVGTime(bean.getExecuteMillisTotal(), bean.getExecuteCount()));
                beanMap.put("sql", bean.getSql());
                sortList.add(beanMap);
            }

            List<Map<String, String>> sortListNew = sortList.stream().sorted((s2, s1) ->(new BigDecimal(s1.get("avgTime"))).compareTo(new BigDecimal(s2.get("avgTime")))).collect(Collectors.toList());

            for(Map<String, String> beanMap : sortListNew){
                out.write(beanMap.get("sql") + SPLIT_STR + beanMap.get("avgTime"));
                out.newLine();
            }
            out.flush();
            out.close();
            System.out.println("输出文件:" + outputFile.getAbsolutePath());
        }
    }

    /**
     * Compute the average execution time
     * @param totalTime
     * @param count
     * @return
     */
    private static String getExecuteAVGTime(String totalTime, String count){

        BigDecimal avgTime = (new BigDecimal(totalTime)).divide(new BigDecimal(count), 2, RoundingMode.HALF_UP);
//        if(avgTime.compareTo(BigDecimal.ZERO) <= 0){
//            System.out.println(totalTime);
//        }
        return avgTime.toString();
    }


    /**
     * Print SELECT statements that have no WHERE clause
     * @throws IOException
     */
    private static void printSqlWithoutWhere() throws IOException {
        System.out.println("---------printSqlWithoutWhere------------");

        for(Map.Entry<File, Set<String>> entity : distinctSqlMap.entrySet()){
            File outputFile = getOutputFile(entity.getKey(), "withoutWhere", null);
            BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outputFile,true)));
            Set<String> sqlSet = entity.getValue();
            for(String sql : sqlSet){
                if(isWithoutWhere(sql)){
                    out.write(sql);
                    out.newLine();
                }
            }

            out.flush();
            out.close();
            System.out.println("输出文件:" + outputFile.getAbsolutePath());
        }
    }

    /**
     * Check whether a SQL statement is a SELECT without a WHERE clause
     * @param sql
     * @return
     */
    private static Boolean isWithoutWhere(String sql){
        sql = sql.toLowerCase(Locale.ROOT);
        if(!sql.startsWith("select")){
            return false;
        }

        if(sql.contains("where")){
            return false;
        }

        return true;
    }

    /**
     * Print the de-duplicated SQL
     */
    private static void printSqlDistinct() throws Exception {
        System.out.println("---------printSqlDistinct------------");

        for(Map.Entry<File, Set<String>> entity : distinctSqlMap.entrySet()){
            File outputFile = getOutputFile(entity.getKey(), "distinct", ".xls");
            Set<String> sqlSet = entity.getValue();
            Map<String, List<String>> classifyMap = new HashMap<>();

            for(String sql : sqlSet){
                classfiyByDMLType(classifyMap, sql);
            }

            List<String> printSql = new ArrayList<>();
            for(Map.Entry<String, List<String>> entrySql: classifyMap.entrySet()){
                for(String sql : entrySql.getValue()){
                    printSql.add(sql);
                }
            }

            String[] rowName = new String[]{"SQL"};
            printToExcel(outputFile.getAbsolutePath(), null, rowName, printSql);
//            printToTxt(outputFile.getAbsolutePath(), printSql);

            System.out.println("输出文件:" + outputFile.getAbsolutePath());
        }
    }

}

DruidLogBean

public class DruidLogBean {
    String sql;
    String executeCount;
    String executeMillisTotal;

    public String getSql() {
        return sql;
    }

    public void setSql(String sql) {
        this.sql = sql;
    }

    public String getExecuteCount() {
        return executeCount;
    }

    public void setExecuteCount(String executeCount) {
        this.executeCount = executeCount;
    }

    public String getExecuteMillisTotal() {
        return executeMillisTotal;
    }

    public void setExecuteMillisTotal(String executeMillisTotal) {
        this.executeMillisTotal = executeMillisTotal;
    }
}

ExportExcel


import org.apache.poi.hssf.usermodel.*;
import org.apache.poi.hssf.util.HSSFColor;
import org.apache.poi.ss.util.CellRangeAddress;

import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;

public class ExportExcel {

    // Title shown at the top of the exported sheet
    private String title;
    // Column names of the exported sheet
    private String[] rowName;
    private List<Object[]> dataList = new ArrayList<Object[]>();

    public static void main(String[] args) {

        String title = "标题";
        String[] rowName = new String[]{"列1", "列2"};

        Object[] rowData = new String[]{"123456", ""};
        List<Object[]> list = new ArrayList<>();
        list.add(rowData);

        try {
            FileOutputStream outputStream = new FileOutputStream("C:\\Users\\63195\\Desktop\\xx.xls");

            ExportExcel exportExcel = new ExportExcel(title, rowName, list);
            exportExcel.export(outputStream);
        } catch (Exception e) {
            e.printStackTrace();
        }

    }

    // Constructor; takes the data to export
    public ExportExcel(String title, String[] rowName, List<Object[]> dataList) {
        this.dataList = dataList;
        this.rowName = rowName;
        this.title = title;
    }

    // Export the data
    public void export(OutputStream out) throws Exception {
        try {
            HSSFWorkbook workbook = new HSSFWorkbook();
            HSSFSheet sheet = workbook.createSheet(title);

            // Create the sheet title row
            HSSFRow rowm = sheet.createRow(0);
            HSSFCell cellTitle = rowm.createCell(0);


            // Define the sheet styles
            HSSFCellStyle columnTopStyle = this.getColumnTopStyle(workbook);
            HSSFCellStyle style = this.getStyle(workbook);
            sheet.addMergedRegion(new CellRangeAddress(0, 1, 0, (rowName.length - 1)));
            cellTitle.setCellStyle(columnTopStyle);
            cellTitle.setCellValue(title);

            // Number of columns needed
            int columnNum = rowName.length;
            HSSFRow rowRowName = sheet.createRow(2);

            // Write the column headers into the sheet cells
            for (int n = 0; n < columnNum; n++) {
                HSSFCell cellRowName = rowRowName.createCell(n);
                cellRowName.setCellType(HSSFCell.CELL_TYPE_STRING);
                HSSFRichTextString text = new HSSFRichTextString(rowName[n]);
                cellRowName.setCellValue(text);
                cellRowName.setCellStyle(columnTopStyle);

            }
            // Write the data rows into the corresponding sheet cells
            for (int i = 0; i < dataList.size(); i++) {
                Object[] obj = dataList.get(i);// one data row
                HSSFRow row = sheet.createRow(i + 3);// data rows start below the title and header rows

                for (int j = 0; j < obj.length; j++) {
                    HSSFCell cell = null;
//                    if (j == 0) {
//                        cell = row.createCell(j, HSSFCell.CELL_TYPE_NUMERIC);
//                        cell.setCellValue(i + 1);
//                    } else {
                        cell = row.createCell(j, HSSFCell.CELL_TYPE_STRING);
//                        if (!"".equals(obj[j]) && obj[j] != null) {
                        if (obj[j] != null) {
                            cell.setCellValue(obj[j].toString());
                        }
                    cell.setCellStyle(style);
//                    }

                }

            }

            // Auto-fit each column width to its longest content
            for (int colNum = 0; colNum < columnNum; colNum++) {
                int columnWidth = sheet.getColumnWidth(colNum) / 256;
                for (int rowNum = 0; rowNum < sheet.getLastRowNum(); rowNum++) {
                    HSSFRow currentRow;
                    if (sheet.getRow(rowNum) == null) {
                        currentRow = sheet.createRow(rowNum);
                    } else {
                        currentRow = sheet.getRow(rowNum);
                    }
                    if (currentRow.getCell(colNum) != null) {
                        HSSFCell currentCell = currentRow.getCell(colNum);
                        if (currentCell.getCellType() == HSSFCell.CELL_TYPE_STRING) {
                            int length = currentCell.getStringCellValue().getBytes().length;
                            if (columnWidth < length) {
                                columnWidth = length;
                            }
                        }
                    }
                }
                if (colNum == 0) {
//                    sheet.setColumnWidth(colNum, (columnWidth - 2) * 256);
                } else {
//                    sheet.setColumnWidth(colNum, (columnWidth + 4) * 256);
                }
            }

            if (workbook != null) {
                try {

                    workbook.write(out);

                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

        } catch (Exception e) {
            e.printStackTrace();
        }
    }



    /*
     * Cell style for the column headers
     */
    public HSSFCellStyle getColumnTopStyle(HSSFWorkbook workbook) {
        // Create the font
        HSSFFont font = workbook.createFont();

        // Font size
        font.setFontHeightInPoints((short) 11);
        // Bold
        font.setBoldweight(HSSFFont.BOLDWEIGHT_BOLD);
        // Font name
        font.setFontName("Courier New");
        // Create the cell style
        HSSFCellStyle style = workbook.createCellStyle();
        // Bottom border
        style.setBorderBottom(HSSFCellStyle.BORDER_THIN);
        // Bottom border color
        style.setBottomBorderColor(HSSFColor.BLACK.index);
        // Right border
        style.setBorderRight(HSSFCellStyle.BORDER_THIN);
        // Top border
        style.setBorderTop(HSSFCellStyle.BORDER_THIN);
        // Top border color
        style.setTopBorderColor(HSSFColor.BLACK.index);
        // Apply the font to the style
        style.setFont(font);
        // No text wrapping
        style.setWrapText(false);
        // Center horizontally and vertically
        style.setAlignment(HSSFCellStyle.ALIGN_CENTER);
        style.setVerticalAlignment(HSSFCellStyle.VERTICAL_CENTER);
        return style;

    }

    public HSSFCellStyle getStyle(HSSFWorkbook workbook) {
        // Create the font
        HSSFFont font = workbook.createFont();
        // Font size
        font.setFontHeightInPoints((short) 10);
        // Regular weight
        font.setBoldweight(HSSFFont.BOLDWEIGHT_NORMAL);
        // Font name
        font.setFontName("Courier New");
        // Create the cell style
        HSSFCellStyle style = workbook.createCellStyle();
        // Bottom border
        style.setBorderBottom(HSSFCellStyle.BORDER_THIN);
        // Bottom border color
        style.setBottomBorderColor(HSSFColor.BLACK.index);
        // Left border
        style.setBorderLeft(HSSFCellStyle.BORDER_THIN);
        // Left border color
        style.setLeftBorderColor(HSSFColor.BLACK.index);
        // Right border
        style.setBorderRight(HSSFCellStyle.BORDER_THIN);
        // Right border color
        style.setRightBorderColor(HSSFColor.BLACK.index);
        // Top border
        style.setBorderTop(HSSFCellStyle.BORDER_THIN);
        // Top border color
        style.setTopBorderColor(HSSFColor.BLACK.index);
        // Apply the font to the style
        style.setFont(font);
        // No text wrapping
        style.setWrapText(false);
        // Left-align horizontally
        style.setAlignment(HSSFCellStyle.ALIGN_LEFT);
        // Center vertically
        style.setVerticalAlignment(HSSFCellStyle.VERTICAL_CENTER);
        return style;
    }
}

The utility supports output to both txt and Excel:
printAllSql prints every SQL statement
printSqlSortByAvgTimeDesc prints SQL sorted by average execution time, descending
printSqlWithoutWhere prints SELECT statements that have no WHERE clause
printSqlDistinct prints the de-duplicated SQL
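Given the suffixes passed to getOutputFile above, an input file named xx.txt should end up alongside output files roughly like the following (names illustrative; the Excel outputs get a .xls extension, the txt outputs keep the original .txt extension):

xx_all.xls          - printAllSql
xx_distinct.xls     - printSqlDistinct
xx_sort.txt         - printSqlSortByAvgTimeDesc (one "sql#avgTime" per line)
xx_withoutWhere.txt - printSqlWithoutWhere (one sql per line)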

The pom.xml dependencies are pasted in below, all in one lump.

        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.47</version>
        </dependency>
        <dependency>
            <groupId>org.apache.poi</groupId>
            <artifactId>poi</artifactId>
            <version>3.11</version>
        </dependency>

        <dependency>
            <groupId>commons-io</groupId>
            <artifactId>commons-io</artifactId>
            <version>2.6</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-thymeleaf</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.session</groupId>
            <artifactId>spring-session-core</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>
