使用JsonParser流式解析json,并使用DataFrame进行矩阵转置。

需求:将一个结构不太规范的原始大 JSON 文件转为 CSV 文件,其中既有 {} 嵌套也有 [ ] 嵌套。
在这里插入图片描述
思路:
1. 肯定不能使用原始的 List、Map 一次性装载全部数据。
2. 尽量减少对line 的遍历。
3. 可适当采用中间文件。

package convert;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import joinery.DataFrame;


import java.io.*;
import java.math.BigDecimal;


/**
 * @author zijian Wang
 * @date 2021/8/10 16:04
 * @VERSION 1.0
 */
/**
 * Converts a large, loosely structured JSON file (containing nested {} objects
 * and [] arrays) into a CSV training file. The JSON is read token-by-token
 * with Jackson's streaming {@code JsonParser} to keep memory flat, and the
 * data matrix is transposed with joinery's {@code DataFrame}.
 *
 * <p>Pipeline (each stage deletes its input intermediate file when done):
 * <ol>
 *   <li>{@link #convert2traindata()} — extract fields into intermediate CSVs</li>
 *   <li>{@link #mergeFile(String, String)} — merge model rows into the index file</li>
 *   <li>{@link #transpose(String)} — transpose the merged matrix</li>
 *   <li>{@link #printResFile(String, String)} — emit the final result CSV</li>
 * </ol>
 *
 * @author zijian Wang
 * @version 1.0 (2021/8/10)
 */
public class convert2traindata {

    private static String filePath_origin;
    private static String outPutPath;

    // Intermediate file paths. The defaults below are placeholders only;
    // main() overwrites all of them from the output directory in args[1].
    private static String filePath_model = "E:\\change\\model_train.csv";
    private static String filePath_model_index = "E:\\change\\model_train_index.csv";
    private static String filePath_model_transpose = "E:\\change\\transpose.csv";
    private static String filePath_model_res;

    // Kept for backward compatibility. Path parsing in main() now accepts
    // both '/' and '\' separators, so this field no longer has to be edited
    // by hand when switching between Windows and Linux.
    private static String delimiter = "/";

    // The stdNames header row is expected to hold 17 entries (indices 0..16);
    // the last entry is written without a trailing comma.
    // NOTE(review): hard-coded column count inherited from the original code —
    // confirm it matches every input file.
    private static final int LAST_STD_NAME_INDEX = 16;

    /**
     * Entry point.
     *
     * @param args args[0] = input JSON file path;
     *             args[1] = output directory (must end with a path separator).
     *             The result CSV gets the same base name as the input file.
     * @throws IOException if any stage of the conversion fails
     */
    public static void main(String[] args) throws IOException {
        if (args.length < 2) {
            System.err.println("Usage: convert2traindata <inputJsonFile> <outputDir>");
            return;
        }
        filePath_origin = args[0];
        outPutPath = args[1];

        // Base name of the input file without its extension. Accept both
        // Windows '\' and Unix '/' separators so the same jar runs anywhere.
        int sepIndex = Math.max(filePath_origin.lastIndexOf('/'),
                filePath_origin.lastIndexOf('\\'));
        String outPutFileName =
                filePath_origin.substring(sepIndex + 1, filePath_origin.lastIndexOf('.'));

        // Derive all intermediate and result paths from the output directory.
        filePath_model = outPutPath + outPutFileName + "_model.csv";
        filePath_model_index = outPutPath + outPutFileName + "_index.csv";
        filePath_model_transpose = outPutPath + outPutFileName + "_transpose.csv";
        filePath_model_res = outPutPath + outPutFileName + ".csv";

        long startTime = System.currentTimeMillis();
        convert2traindata();
        mergeFile(filePath_model, filePath_model_index);
        transpose(filePath_model_index);
        printResFile(filePath_model_transpose, filePath_model_res);
        long endTime = System.currentTimeMillis();
        System.out.println("程序运行时间: " + (endTime - startTime) + "ms");
    }

    /**
     * Streams the origin JSON with {@code JsonParser} and writes the extracted
     * fields into the intermediate files:
     * <ul>
     *   <li>"stdNames" array  → header row of the final result file</li>
     *   <li>"times" array     → first row of the model file, plus a column
     *       index row (0..n-1) consumed later by the transpose step</li>
     *   <li>"dataMatrix" rows → remaining rows of the model file, one CSV
     *       line per nested array</li>
     * </ul>
     * All streams are closed via try-with-resources even on early exit
     * (the original leaked writer_index/writer_res on the break path).
     *
     * @throws IOException on read/write failure
     */
    public static void convert2traindata() throws IOException {
        JsonFactory jsonFactory = new JsonFactory();
        // createParser replaces the deprecated createJsonParser.
        try (JsonParser jsonParser = jsonFactory.createParser(new File(filePath_origin));
             PrintWriter writer_model = new PrintWriter(new OutputStreamWriter(
                     new BufferedOutputStream(new FileOutputStream(filePath_model)), "UTF-8"));
             PrintWriter writer_index = new PrintWriter(new OutputStreamWriter(
                     new BufferedOutputStream(new FileOutputStream(filePath_model_index)), "UTF-8"));
             PrintWriter writer_res = new PrintWriter(new OutputStreamWriter(
                     new BufferedOutputStream(new FileOutputStream(filePath_model_res)), "UTF-8"))) {

            jsonParser.nextToken();
            while (jsonParser.nextToken() != JsonToken.NOT_AVAILABLE) {
                String fieldname = jsonParser.getCurrentName();
                // nextToken() returns null at end of input; stop cleanly.
                if (jsonParser.nextToken() == null || fieldname == null) {
                    break;
                }

                // "stdNames": the column names go straight into the result file.
                if ("stdNames".equals(fieldname)) {
                    writer_res.append("ts").append(",");
                    int fieldIndex = 0;
                    while (jsonParser.currentToken() != JsonToken.END_ARRAY) {
                        writer_res.append(jsonParser.getText());
                        if (fieldIndex != LAST_STD_NAME_INDEX) {
                            writer_res.append(",");
                        }
                        fieldIndex++;
                        jsonParser.nextToken();
                    }
                    writer_res.write("\n");
                }

                // "times": timestamps become the first model row; also emit an
                // index row so the transpose step has unique column labels.
                if ("times".equals(fieldname)) {
                    jsonParser.nextToken();
                    int transposeIndex = 0;
                    while (jsonParser.currentToken() != JsonToken.END_ARRAY) {
                        transposeIndex++;
                        // toPlainString avoids scientific notation for large
                        // timestamp values.
                        writer_model.append(
                                new BigDecimal(jsonParser.getText()).toPlainString()).append(",");
                        jsonParser.nextToken();
                    }
                    for (int i = 0; i < transposeIndex; i++) {
                        writer_index.append(String.valueOf(i)).append(",");
                    }
                    writer_index.append("\n");
                }

                // "dataMatrix": each nested [...] becomes one CSV line.
                if ("dataMatrix".equals(fieldname)) {
                    writer_model.append("\n");
                    while (jsonParser.currentToken() != JsonToken.END_OBJECT) {
                        String text = jsonParser.getText();
                        // equals(), not == : getText() is not guaranteed to
                        // return interned strings (original compared references).
                        if (!"[".equals(text)) {
                            if ("]".equals(text)) {
                                writer_model.append("\n");
                            } else {
                                writer_model.append(text).append(",");
                            }
                        }
                        jsonParser.nextToken();
                    }
                }
            }
        }
    }

    /**
     * Appends the qualifying lines of {@code file1} to {@code file2} (the
     * index file), then deletes {@code file1}.
     *
     * @param file1 model file whose rows are appended, deleted afterwards
     * @param file2 index file that receives the rows (opened in append mode)
     * @throws IOException on read/write failure
     */
    public static void mergeFile(String file1, String file2) throws IOException {
        try (BufferedReader inputStream = new BufferedReader(new FileReader(file1));
             BufferedWriter outputStream =
                     new BufferedWriter(new FileWriter(new File(file2), true))) {
            String line;
            while ((line = inputStream.readLine()) != null) {
                // isEmpty() replaces the original `!= ""` reference compare,
                // which never matched readLine() results. The length > 17
                // filter drops stray short lines — presumably separator rows;
                // TODO confirm the threshold against real data.
                if (!line.isEmpty() && line.length() > 17) {
                    outputStream.write(line);
                    outputStream.write("\n");
                }
            }
        }
        new File(file1).delete();
    }

    /**
     * Transposes the merged CSV matrix with joinery, re-renders the first
     * column in plain (non-scientific) decimal notation, writes the result to
     * {@code filePath_model_transpose}, and deletes the input file.
     *
     * @param filePath path of the CSV to transpose, deleted afterwards
     * @throws IOException on read/write failure
     */
    public static void transpose(String filePath) throws IOException {
        DataFrame df = DataFrame.readCsv(filePath, ",", DataFrame.NumberDefault.LONG_DEFAULT);
        DataFrame<String> df3 = df.transpose();
        System.out.println(df3.length());
        for (int i = 0; i < df3.length() - 1; i++) {
            // Column 0 holds the timestamps; force plain decimal rendering.
            String value = new BigDecimal(String.valueOf(df3.get(i, 0))).toPlainString();
            df3.set(i, 0, value);
        }
        df3.writeCsv(filePath_model_transpose);
        new File(filePath).delete();
    }

    /**
     * Copies the data lines of {@code file1} into the final result file
     * {@code file2}, skipping the first line (the transpose's index header)
     * and short/blank lines, then deletes {@code file1}.
     *
     * @param file1 transposed CSV, deleted afterwards
     * @param file2 final result CSV (opened in append mode; the header row
     *              was already written by {@link #convert2traindata()})
     * @throws IOException on read/write failure
     */
    public static void printResFile(String file1, String file2) throws IOException {
        try (BufferedReader inputStream = new BufferedReader(new FileReader(file1));
             BufferedWriter outputStream =
                     new BufferedWriter(new FileWriter(new File(file2), true))) {
            String line;
            int lineCode = 0;
            while ((line = inputStream.readLine()) != null) {
                // lineCode > 0 skips the first line; the length filter drops
                // stray short lines (same threshold as mergeFile).
                if (!line.isEmpty() && line.length() > 17 && lineCode > 0) {
                    outputStream.write(line);
                    outputStream.write("\n");
                }
                lineCode++;
            }
        }
        new File(file1).delete();
    }
}

测试后3000行的json需要0.3S左右。
3w 行的大约 2.8s 执行完,效率应付基本需求完全够用~
在这里插入图片描述

  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

王子健121

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值