influxdb在java中的应用

influxdb的安装和语法这边不加以说明,本文主要讲解influxdb在java大数据量存储的应用。

该项目的场景是存储设备的历史数据。(针对时序数据库influxdb是个比较好的选择)

一。流程图

项目启动流程:

存储流程如下:

二,数据库及表设计

数据库:

rt_necp_history 短期数据库

key_necp_history 长期数据库

表:

AI_RESULT 存储数字类型的表(double,int),方便以后统计计算

MIX_RESULT 混合表(存储字符串类型)

字段:tid 数据的唯一标识(对应平台的设备的测点) value 值 ,time 创建时间

总共三个字段,即将设备的每个测点当做一条数据存储,该种方案可能会造成influxdb的series数量庞大。

实际压测过:内存8g,能存储的数据量在5000万条左右,还是可以满足一定的需求。

若对数据量要求较大时建议用一个设备作为一条数据,表的字段将会比较多。

实际压测过:内存8g,表字段在500个左右,能存储数据量在1000万左右。

 

存储策略:

LONG_YEAR 长期数据
// Long-term retention: create (and set as default) the retention policy on the long-term database.
String commandLong = String.format("CREATE RETENTION POLICY \"%s\" ON \"%s\" DURATION %s REPLICATION %s DEFAULT",
                keyPolicy, keyDataBase, "1300w", 1); // 25 years: 52 weeks/year * 25 = 1300w (weeks)
        this.query(commandLong,keyDataBase);

 

ONE_MONTH 短期数据
// Short-term retention: create (and set as default) a 30-day retention policy on the real-time database.
String command = String.format("CREATE RETENTION POLICY \"%s\" ON \"%s\" DURATION %s REPLICATION %s DEFAULT",
                rtPolicy, rtDataBase, "30d", 1); // keep data for 30 days
        this.query(command,rtDataBase);

分为两种策略分别设置为长期数据库和短期数据库的默认存储策略。

三,实践。

1.在influxdb配置文件中打开这两个参数并设置为0(批量存储无限)。

配置文件中的参数看实际情况打开,具体可参考如下:

https://www.cnblogs.com/mafeng/p/6848166.html

2.pom.xml中导入jar包依赖

3.influxdb工具类

@Component
/**
 * Low-level InfluxDB access helper: owns a (nominally pooled) client, manages
 * retention policies, and provides InfluxQL insert/query helpers used by the
 * history-data DAOs.
 */
public class InfluxBaseDao implements InitializingBean {
    private static InfluxDB influxDB;
    private static final int maxActiveNum = 1000; // number of entries placed in the pool at init
    private static BlockingQueue<InfluxDB> connPool;
    @Value("${spring.influx.user}")
    private String username; // user name
    @Value("${spring.influx.password}")
    private String password; // password
    @Value("${spring.influx.url}")
    private String openurl; // connection URL
    @Value("${spring.influx.dataBase}")
    private String database; // database name
    @Value("${spring.influx.table.analog}")
    private String analogtable; // measurement for numeric (double/int) values
    @Value("${spring.influx.table.mix}")
    private String mixtable; // mixed measurement (string values etc.)

    /**
     * Pre-fills the connection "pool".
     * Bug fix: the original called influxDbBuild() twice per iteration (once for
     * the null check, once for put) — build once and reuse the reference.
     * NOTE(review): influxDbBuild() always returns the same static singleton, so
     * the queue holds maxActiveNum references to ONE connection — this is not a
     * real pool; consider a proper pool or a single shared client.
     */
    void initPool() {
        connPool = new LinkedBlockingQueue<>(maxActiveNum);
        while (connPool.size() < maxActiveNum) {
            InfluxDB conn = influxDbBuild();
            if (conn == null) {
                break; // could not connect; leave the pool partially filled
            }
            try {
                connPool.put(conn);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore the interrupt flag
                break;
            }
        }
    }

    /** Spring lifecycle hook: fill the pool and make sure the singleton client exists. */
    @Override
    public void afterPropertiesSet() throws Exception {
        initPool();
        influxDbBuild(); // ensure the singleton exists even if the pool could not be filled
        //createRetentionPolicy();
    }

    /**
     * Connects to InfluxDB (lazy singleton). On first connect, creates the
     * configured database and its default retention policy when the database
     * does not exist yet.
     * NOTE(review): the lazy init is not thread-safe; concurrent first calls may
     * build the connection more than once.
     **/
    @SuppressWarnings("deprecation")
    public InfluxDB influxDbBuild() {
        try {
            if (influxDB == null) {
                if (openurl != null) {
                    influxDB = InfluxDBFactory.connect(openurl, username, password);
                    if (influxDB != null) {
                        if (!influxDB.databaseExists(database)) {
                            influxDB.createDatabase(database);
                            createRetentionPolicy(); // initialize the default retention policy
                        }
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace(); // NOTE(review): prefer a logger; kept to avoid new dependencies
        }
        return influxDB;
    }

    /**
     * Hands out a client: from the pool while it still has entries, otherwise the
     * shared singleton.
     * Bug fix: the original checked size() and then called the blocking take(),
     * which can block forever when another thread drains the queue in between;
     * the non-blocking poll() is race-free.
     * NOTE(review): taken connections are never returned, so the pool only ever
     * shrinks; after maxActiveNum calls everything falls through to the singleton.
     */
    public InfluxDB getInfluxDB() {
        InfluxDB conn = connPool.poll();
        return conn != null ? conn : influxDbBuild();
    }

    /** Replaces the shared client (primarily useful for tests). */
    public static void setInfluxDB(InfluxDB influxDB) {
        InfluxBaseDao.influxDB = influxDB;
    }

    /**
     * Creates the default retention policy: duration 1095d (3 years), replication 1,
     * marked DEFAULT for the database.
     * The policy name "defalut" is a historical typo; it is deliberately kept because
     * deployed databases already carry a policy under that exact name and
     * dropRetentionPolicyDefault() must keep matching it.
     */
    public void createRetentionPolicy() {
        String command = String.format("CREATE RETENTION POLICY \"%s\" ON \"%s\" DURATION %s REPLICATION %s DEFAULT",
                "defalut", database, "1095d", 1); // 3 years
        this.query(command);
    }

    /** Creates the default retention policy with unlimited duration (0s = keep forever). */
    public void createRetentionPolicyDefault() {
        String command = String.format("CREATE RETENTION POLICY \"%s\" ON \"%s\" DURATION %s REPLICATION %s DEFAULT",
                "defalut", database, "0s", 1); // unlimited retention
        this.query(command);
    }

    /** Drops the "defalut" retention policy (name must match the create methods above). */
    public void dropRetentionPolicyDefault() {
        String command = String.format("DROP RETENTION POLICY \"%s\" ON \"%s\"",
                "defalut", database);
        this.query(command);
    }

    /**
     * Runs an InfluxQL statement against the configured database.
     *
     * @param command the InfluxQL statement
     * @return the raw query result
     */
    public QueryResult query(String command) {
        return getInfluxDB().query(new Query(command, database));
    }

    /** Runs an InfluxQL statement against an explicitly named database. */
    public QueryResult query(String command, String database) {
        return getInfluxDB().query(new Query(command, database));
    }

    /**
     * Inserts one point with a server-assigned timestamp.
     *
     * @param measurement target measurement (table)
     * @param tags        indexed tag values (tags are always strings in InfluxDB)
     * @param fields      field values
     */
    public void insert(String measurement, Map<String, String> tags, Map<String, Object> fields) {
        Point.Builder builder = Point.measurement(measurement);
        builder.tag(tags);
        builder.fields(fields);
        getInfluxDB().write(database, "", builder.build()); // "" = default retention policy
    }

    /**
     * Inserts one point with an explicit timestamp.
     *
     * @param time point time in epoch milliseconds
     */
    public void insert(String measurement, Map<String, String> tags, Map<String, Object> fields, long time) {
        Point.Builder builder = Point.measurement(measurement);
        builder.tag(tags);
        builder.fields(fields);
        builder.time(time, TimeUnit.MILLISECONDS);
        getInfluxDB().write(database, "", builder.build());
    }

    /** Inserts one tag-less point with a server-assigned timestamp. */
    public void insert(String measurement, Map<String, Object> fields) {
        Point.Builder builder = Point.measurement(measurement);
        builder.fields(fields);
        getInfluxDB().write(database, "", builder.build());
    }

    /**
     * Builds a WHERE clause filtering by tid membership and a time range.
     * The -8h shift compensates for InfluxDB storing UTC timestamps while the
     * callers work in UTC+8 (see IfdbDateUtils.formatDateByNum).
     * SECURITY(review): tids are concatenated into InfluxQL without escaping — a
     * tid containing a single quote can break or inject the query; sanitize at
     * the call site or move to bound parameters.
     */
    public String joinWhere(List<String> tids, Date startTime, Date endTime) {
        String startTimeStr = IfdbDateUtils.formatDateByNum(startTime, -8); // timezone shift
        String endTimeStr = IfdbDateUtils.formatDateByNum(endTime, -8);
        StringBuilder command = new StringBuilder("where 1=1 ");
        if (tids != null && !tids.isEmpty()) {
            command.append(" and (");
            for (int i = 0; i < tids.size(); i++) {
                if (i > 0) {
                    command.append(" or ");
                }
                command.append("tid = '").append(tids.get(i)).append("'");
            }
            command.append(" ) ");
        }
        if (StringUtils.isNotBlank(startTimeStr)) {
            command.append("and  time >='").append(startTimeStr).append("' ");
        }
        if (StringUtils.isNotBlank(endTimeStr)) {
            command.append("and time <= '").append(endTimeStr).append("' ");
        }
        return command.toString();
    }

    /**
     * Batch-inserts history rows. Numeric values go to the analog measurement and
     * everything else to the mixed measurement (the upstream layer does not tag the
     * value type, so we sniff it per row).
     * Bug fix: the original called getInfluxDB().close() after writing, which
     * closes the shared singleton connection and breaks every later operation.
     */
    public void batchInstertHistoryData(List<HistoryData> rtDList) {
        try {
            BatchPoints batchPoints = BatchPoints.database(database)
                    .consistency(InfluxDB.ConsistencyLevel.ALL).build();
            for (HistoryData rtd : rtDList) {
                boolean numeric = IfdbDateUtils.isDouble(rtd.getValue());
                Point.Builder builder = Point
                        .measurement(numeric ? analogtable : mixtable)
                        // tags can only hold strings
                        .tag("tid", rtd.getDevicePointCode());
                if (numeric) {
                    builder.addField("value", Double.valueOf(rtd.getValue()));
                } else {
                    builder.addField("value", rtd.getValue());
                }
                batchPoints.point(builder.build());
            }
            getInfluxDB().write(batchPoints);
            // do NOT close() here — the connection is a shared singleton
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Executes a delete statement.
     *
     * @param command the InfluxQL delete statement
     * @return the server error message, or null on success
     */
    public String deleteMeasurementData(String command) {
        QueryResult result = getInfluxDB().query(new Query(command, database));
        return result.getError();
    }

    /**
     * Pages rows from the given measurement.
     *
     * @return map with "total" (row count for the filter) and "rows" (the requested
     *         page, ordered by time desc)
     */
    public Map<String, Object> queryDataDataByOffset(String table, String command, int pageSize, int pageNum) {
        Map<String, Object> rsMap = new HashMap<String, Object>();
        command = "select * from " + table + " " + command;
        String sumSQL = " select count(value) as value,time as t from ( " + command + " ) ";
        List<CommonData> sumData = queryRailTransitData(sumSQL);
        if (sumData != null && !sumData.isEmpty()) {
            // count(value) comes back as an integral double, e.g. "42.0" (Double -> Integer)
            int total = (int) Math.round(Double.parseDouble(sumData.get(0).getValue()));
            rsMap.put("total", total);
        } else {
            rsMap.put("total", 0);
        }
        // Bug fix: the original concatenated "ORDER BY" with no leading space and only
        // worked because joinWhere() happens to end with one.
        String offsetSql = command + " ORDER BY time  desc  limit " + pageSize + " OFFSET " + (pageNum - 1) * pageSize;
        rsMap.put("rows", queryRailTransitData(offsetSql));
        return rsMap;
    }

    /** Returns all matching rows of a measurement, newest first. */
    public List<CommonData> queryHistoryData(String table, String command) {
        command = "select * from " + table + " " + command;
        String offsetSql = command + " ORDER BY time  desc ";
        return queryRailTransitData(offsetSql);
    }

    /**
     * Daily aggregates (max/min/mean/last) from the analog measurement.
     * InfluxDB timestamps are stored shifted by -8h relative to local time, so
     * grouping by time(1d) would produce day buckets offset from local days.
     * Instead we group by time(8h) and fold every 3 consecutive buckets into one
     * local day below.
     *
     * @param command WHERE clause (see joinWhere)
     * @param type    1:max 2:min 3:mean 4:last (instantaneous)
     */
    public List<CommonData> queryDayData(String command, String type) {
        java.text.DecimalFormat df = new java.text.DecimalFormat("#0.00");
        String[] staticMap = {"max", "min", "mean", "last"};
        int idx = Integer.parseInt(type) - 1;
        // Robustness: fall back to mean instead of ArrayIndexOutOfBounds on a bad type code
        String statics = (idx >= 0 && idx < staticMap.length) ? staticMap[idx] : "mean";

        command = "select " + statics + "(value) as value from " + analogtable + " " + command;
        String offsetSql = command + " group by time(8h),* fill(0) ";
        List<CommonData> offsetData = queryRailTransitData(offsetSql);

        List<CommonData> offsetDataResult = new ArrayList<>();
        double sum = 0.0; // running sum of the day's non-zero bucket values
        int flag = 0;     // number of non-zero buckets seen in the current day
        Double[] ereryGroup = {0.0, 0.0, 0.0}; // the day's (up to 3) non-zero bucket values

        for (int i = 0; i < offsetData.size(); i++) {
            String bucketValue = offsetData.get(i).getValue();
            // fill(0) buckets are padding; skip them so they do not skew the statistics
            if (!"0.0".equals(bucketValue) && !"0".equals(bucketValue)) {
                sum += Double.parseDouble(bucketValue);
                ereryGroup[flag] = Double.parseDouble(bucketValue);
                flag++;
            }

            if ((i + 1) % 3 == 0) { // 3 buckets of 8h = one local day: emit the day's value
                CommonData temp = new CommonData();
                temp.setTid(offsetData.get(i).getTid());
                temp.setTime(offsetData.get(i).getTime());

                switch (ReportStatisticsOptionEnum.getInstance(type)) {
                    case MAX:
                        Double tempmax = Collections.max(Arrays.asList(ereryGroup));
                        temp.setValue(flag == 0 ? (sum + "") : df.format(new BigDecimal(tempmax)));
                        break;
                    case MIN:
                        // NOTE(review): unfilled slots stay 0.0, so MIN can report 0.0
                        // for partially-filled days — confirm intended behavior.
                        Double tempmin = Collections.min(Arrays.asList(ereryGroup));
                        temp.setValue(flag == 0 ? (sum + "") : df.format(new BigDecimal(tempmin)));
                        break;
                    case NOW:
                        // instantaneous value: the day's last 8h bucket
                        temp.setValue(flag == 0 ? (sum + "") : df.format(new BigDecimal(offsetData.get(i).getValue())));
                        break;
                    default:
                        // mean over the day's 3 buckets
                        temp.setValue(flag == 0 ? (sum + "") : df.format(new BigDecimal(sum / 3)));
                        break;
                }
                offsetDataResult.add(temp);
                sum = 0.0;
                flag = 0;
                ereryGroup = new Double[]{0.0, 0.0, 0.0}; // reset for the next day
            }
        }
        return offsetDataResult;
    }

    /**
     * Hourly aggregates from the analog measurement. Hour buckets are unaffected
     * by the 8h storage offset, so results are returned directly.
     *
     * @param type 1:max 2:min 3:mean 4:last
     */
    public List<CommonData> queryHourData(String command, String type) {
        String[] staticMap = {"max", "min", "mean", "last"};
        int idx = Integer.parseInt(type) - 1;
        String statics = (idx >= 0 && idx < staticMap.length) ? staticMap[idx] : "mean";
        command = "select " + statics + "(value) as value from " + analogtable + " " + command;
        String offsetSql = command + " group by time(1h),* fill(0) ";
        return queryRailTransitData(offsetSql);
    }

    /** Runs a query and flattens the result into CommonData rows. */
    public List<CommonData> queryRailTransitData(String command) {
        QueryResult rs = getInfluxDB().query(new Query(command, database));
        return dealResult(rs);
    }

    /**
     * Flattens a QueryResult into CommonData rows; tolerates null/empty results.
     * Bug fix: the original only null-checked the FIRST result's series and then
     * iterated ALL results — a later result with null series threw an NPE.
     */
    public List<CommonData> dealResult(QueryResult rs) {
        List<CommonData> lists = new ArrayList<CommonData>();
        if (rs == null || rs.getResults() == null) {
            return lists;
        }
        for (QueryResult.Result result : rs.getResults()) {
            if (result == null || result.getSeries() == null) {
                continue; // statement returned no series (e.g. empty match)
            }
            for (QueryResult.Series serie : result.getSeries()) {
                lists.addAll(getQueryData(serie.getColumns(), serie.getValues(), serie.getTags()));
            }
        }
        return lists;
    }

    /***
     * Maps column names and row values onto CommonData objects.
     * When the query groups by tag, "tid" is not a column but a series tag, so it
     * is taken from the tags map instead. (Two unused SimpleDateFormat locals from
     * the original were removed.)
     ***/
    private List<CommonData> getQueryData(List<String> columns, List<List<Object>> values, Map<String, String> tags) {
        List<CommonData> lists = new ArrayList<CommonData>();
        String tagTid = null;
        if (!columns.contains("tid") && tags != null) {
            tagTid = tags.get("tid"); // tid supplied by the series tags, not a column
        }
        for (List<Object> row : values) {
            CommonData info = new CommonData();
            if (tagTid != null) {
                info.setTid(tagTid);
            }
            for (int i = 0; i < row.size(); i++) {
                String propertyName = columns.get(i); // column name
                Object value = row.get(i);            // corresponding cell
                if (value == null) {
                    continue; // robustness: fill gaps without NPE on toString()
                }
                if ("time".equals(propertyName)) {
                    // convert the stored UTC timestamp back to local (CST) time
                    String time = IfdbDateUtils.UTCToCSTNew(value.toString(), "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
                    info.setTime(time);
                } else if ("tid".equals(propertyName)) {
                    info.setTid((String) value);
                } else if ("value".equals(propertyName)) {
                    info.setValue(value.toString());
                }
            }
            lists.add(info);
        }
        return lists;
    }

    // ---- plain accessors for the configured connection/table properties ----

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public String getOpenurl() {
        return openurl;
    }

    public void setOpenurl(String openurl) {
        this.openurl = openurl;
    }

    public String getDatabase() {
        return database;
    }

    public void setDatabase(String database) {
        this.database = database;
    }

    public String getAnalogtable() {
        return analogtable;
    }

    public void setAnalogtable(String analogtable) {
        this.analogtable = analogtable;
    }

    public String getMixtable() {
        return mixtable;
    }

    public void setMixtable(String mixtable) {
        this.mixtable = mixtable;
    }

}

四。难点。

由于influxdb插入数据时间默认与实际时间相差8小时,业务上需要自己做时间转换

public static String formatDateByNum(Date time, Integer num) {
        // Shift the given instant by `num` hours and render it as "yyyy-MM-dd HH:mm:ss".
        // A null input yields an empty string.
        if (time == null) {
            return "";
        }
        Calendar shifted = Calendar.getInstance();
        shifted.setTime(time);
        shifted.add(Calendar.HOUR, num);
        return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(shifted.getTime());
    }

单纯读取数据可能只需要做时间减8小时即可,但是当需要做统计平均值,最大值等数据时,需要将influxdb实际计算出的值按一天3个8小时拆分计算(group by time惹的祸)

@Repository
/**
 * Business-level history-data DAO: parses incoming JSON payloads, optionally
 * compresses them with the SDT (swinging door trending) algorithm, and writes
 * them to InfluxDB in bounded batches. Also exposes read helpers that span both
 * the numeric and the mixed measurement.
 */
public class HistoryHandleDao {
    // Per-point SDT compression state, keyed by device point code.
    // NOTE(review): static mutable map shared process-wide with no synchronization;
    // concurrent analysisSDTData calls can race — confirm single-threaded usage.
    private static Map<String,SDTPoints> statusMap = new HashMap<>();
    private static  double accuracyE = 0.5; // SDT tolerance; keeps compressed deltas around 1-2

    private static final long HOUR_MILLIS = 60L * 60 * 1000; // force-store interval for SDT

    @Value("${spring.influx.table.analog}")
    private String analogtable; // numeric measurement
    @Value("${spring.influx.table.mix}")
    private String mixtable; // mixed measurement (strings etc.)

    @Autowired
    private InfluxBaseDao influxBaseOurDao;

    /**
     * Parses the payloads (each element is a JSON array of HistoryData) and writes
     * every row straight to InfluxDB without compression.
     */
    public void analysisData(List<String> sourceDataList){
        List<HistoryData> analogList = new ArrayList<>();
        for (String source : sourceDataList) {
            JSONArray dataArray = JSONArray.parseArray(source);
            for (int j = 0; j < dataArray.size(); j++) {
                JSONObject jsonObject = dataArray.getJSONObject(j);
                analogList.add(JSON.toJavaObject(jsonObject, HistoryData.class));
            }
        }
        bathSaveDataToInfluxDb(analogList); // persist
    }

    /**
     * Same as analysisData but stamps each row with the arrival time and runs the
     * batch through SDT compression before writing.
     */
    public void analysisSDTData(List<String> sourceDataList){
        List<HistoryData> analogList = new ArrayList<>();
        for (String source : sourceDataList) {
            JSONArray dataArray = JSONArray.parseArray(source);
            for (int j = 0; j < dataArray.size(); j++) {
                JSONObject jsonObject = dataArray.getJSONObject(j);
                HistoryData historyData = JSON.toJavaObject(jsonObject, HistoryData.class);
                historyData.setTime(new Date().getTime()); // sample time = arrival time
                analogList.add(historyData);
            }
        }
        bathSaveDataToInfluxDb(SDTcompress(analogList)); // compress, then persist
    }

    /**
     * Writes rows in chunks of maxDataNum so a single HTTP batch does not grow
     * unbounded. (The comments in the original said "delete" — they were
     * copy-pasted from deletion code; this method only inserts.)
     */
    public void bathSaveDataToInfluxDb(List<HistoryData> rtDList) {
        int dNum = rtDList.size();
        int maxDataNum = 300000; // max points per batch write
        int num = (dNum + maxDataNum - 1) / maxDataNum; // ceil(dNum / maxDataNum)
        for (int i = 1; i <= num; i++) {
            int start = (i - 1) * maxDataNum;
            int end = Math.min(i * maxDataNum, dNum);
            // subList is a view, which is fine: batchInstertHistoryData only reads it
            influxBaseOurDao.batchInstertHistoryData(rtDList.subList(start, end));
        }
    }

    /** Disables expiry: replaces the default policy with an unlimited (0s) one. */
    public void forbid() {
        influxBaseOurDao.dropRetentionPolicyDefault();
        influxBaseOurDao.createRetentionPolicyDefault();
    }

    /** Re-enables expiry: replaces the default policy with the 3-year one. */
    public void use() {
        influxBaseOurDao.dropRetentionPolicyDefault();
        influxBaseOurDao.createRetentionPolicy();
    }

    /**
     * SDT (swinging door trending) compression. Numeric samples are compressed per
     * device point; non-numeric samples pass through unchanged. A point is always
     * stored once more than an hour has passed since the last stored point.
     * Bug fix: the original tested (dt % 1 day) / 1 hour < 1, which wraps every
     * 24h — e.g. a 24h gap was misclassified as "within one hour". The raw delta
     * is compared now, and a zero delta no longer divides by zero.
     */
    public List<HistoryData> SDTcompress(List<HistoryData> originData){
        List<HistoryData> listSDT = new ArrayList<>();
        if (originData.isEmpty()) {
            return listSDT;
        }
        for (HistoryData p : originData) {
            if (!IfdbDateUtils.isDouble(p.getValue())) {
                listSDT.add(p); // non-numeric: store unchanged
                continue;
            }
            SDTPoints status = statusMap.get(p.getDevicePointCode());
            if (status == null) {
                // first sample for this point: always store it and open a new door
                status = new SDTPoints();
                status.setLastReadData(p);
                status.setLastStoredData(p);
                listSDT.add(p);
            }
            status.setCurrentData(p);
            long dt = p.getTime() - status.getLastStoredData().getTime();
            if (dt > 0 && dt < HOUR_MILLIS) {
                // within the 1h window: widen the door and store only when it closes
                double nowUp = (Double.valueOf(p.getValue()) - Double.valueOf(status.getLastStoredData().getValue()) - accuracyE) / dt;
                if (nowUp > status.getUpGate()) {
                    status.setUpGate(nowUp);
                }
                double nowDown = (Double.valueOf(p.getValue()) - Double.valueOf(status.getLastStoredData().getValue()) + accuracyE) / dt;
                if (nowDown < status.getDownGate()) {
                    status.setDownGate(nowDown);
                }
                if (status.getUpGate() >= status.getDownGate()) {
                    storePreviousAndResetGates(status, p, listSDT); // door closed: store previous point
                }
            } else if (dt >= HOUR_MILLIS) {
                storePreviousAndResetGates(status, p, listSDT); // over an hour: force-store
            }
            // dt <= 0 (same-millisecond sample, e.g. right after initialization above):
            // skip the gate math to avoid dividing by zero.
            status.setLastReadData(p);
            statusMap.put(p.getDevicePointCode(), status);
        }
        return listSDT;
    }

    /** Stores the previous read point and re-opens the door relative to it. */
    private void storePreviousAndResetGates(SDTPoints status, HistoryData p, List<HistoryData> listSDT) {
        listSDT.add(status.getLastReadData());
        status.setLastStoredData(status.getLastReadData());
        long dt = p.getTime() - status.getLastStoredData().getTime();
        status.setUpGate((Double.valueOf(p.getValue()) - Double.valueOf(status.getLastStoredData().getValue()) - accuracyE) / dt);
        status.setDownGate((Double.valueOf(p.getValue()) - Double.valueOf(status.getLastStoredData().getValue()) + accuracyE) / dt);
    }

    /**
     * Paged query spanning both measurements: tries the numeric table first and
     * falls back to the mixed (string) table when the numeric table has no matches.
     *
     * @param tids      business identifiers (device point codes)
     * @param startTime query window start
     * @param endTime   query window end
     * @param pageSize  rows per page
     * @param pageNum   1-based page number
     * @return map with "total" (row count) and "rows" (page data)
     */
    public Map<String, Object> queryDataDataForPage(List<String> tids, Date startTime, Date endTime, int pageSize, int pageNum) {
        String command = influxBaseOurDao.joinWhere(tids, startTime, endTime);
        Map<String, Object> rsMap = influxBaseOurDao.queryDataDataByOffset(analogtable, command, pageSize, pageNum);
        if (rsMap.get("total") != null && (Integer) rsMap.get("total") != 0) {
            return rsMap;
        }
        return influxBaseOurDao.queryDataDataByOffset(mixtable, command, pageSize, pageNum);
    }

    /**
     * Full history query spanning both measurements: numeric table first, mixed
     * table as fallback when the numeric table has no matches.
     */
    public List<CommonData> queryDataData(List<String> tids, Date startTime, Date endTime) {
        String command = influxBaseOurDao.joinWhere(tids, startTime, endTime);
        List<CommonData> commonDataList = influxBaseOurDao.queryHistoryData(analogtable, command);
        if (commonDataList != null && !commonDataList.isEmpty()) {
            return commonDataList;
        }
        return influxBaseOurDao.queryHistoryData(mixtable, command);
    }

    /**
     * Per-day aggregates from the numeric table.
     *
     * @param tids      business identifiers (device point codes, or "mc_<paramId>" for energy data)
     * @param startTime must be a day start ("yyyy-MM-dd 00:00:00") — otherwise the
     *                  8h-bucket folding in InfluxBaseDao.queryDayData misaligns
     * @param endTime   must be a day end ("yyyy-MM-dd 23:59:59") for the same reason
     * @param type      1:max 2:min 3:mean 4:last (instantaneous)
     */
    public List<CommonData> queryDayData(List<String> tids, Date startTime, Date endTime, String type) {
        String command = influxBaseOurDao.joinWhere(tids, startTime, endTime);
        return influxBaseOurDao.queryDayData(command, type);
    }

    /** Per-hour aggregates from the numeric table; no day-folding needed. */
    public List<CommonData> queryHourData(List<String> tids, Date startTime, Date endTime, String type) {
        String command = influxBaseOurDao.joinWhere(tids, startTime, endTime);
        return influxBaseOurDao.queryHourData(command, type);
    }
}

五。数据压缩

由于工业上可能产生很多无用的历史数据,需要将数据压缩后进行存储,业务需求也只需要一个小时至少存一条数据即可,这边引入旋转门压缩算法。

旋转门算法是一种比较快速的线性拟合算法,常常用于实时数据库中对数据进行压缩,使存储容量大大的减少。在实时数据库中,数据通常具有如下特点:1. 数据采集量大。2. 数据临近度高。

//SDT swinging-door trending (SDT) compression: keeps only points that breach the
//tolerance "door", plus at least one point per hour per device point.
    public List<HistoryData> SDTcompress(List<HistoryData> originData){
        List<HistoryData> listSDT = new ArrayList<>();
        if (originData.isEmpty()) {
            return listSDT;
        }
        for (HistoryData p : originData) {
            if (IfdbDateUtils.isDouble(p.getValue())) { // only numeric values are compressed
                SDTPoints status = statusMap.get(p.getDevicePointCode());
                if (status == null) {
                    // first sample for this point: always store it and open a new door
                    status = new SDTPoints();
                    status.setLastReadData(p);
                    status.setLastStoredData(p);
                    listSDT.add(p);
                }
                status.setCurrentData(p);
                long dt = p.getTime() - status.getLastStoredData().getTime();
                // Bug fix: the original tested (dt % 1 day) / 1 hour < 1, which wraps
                // every 24h — e.g. a 24h gap was misclassified as "within one hour".
                if (dt > 0 && dt < 60 * 60 * 1000L) { // within 1h of the last stored point
                    double nowUp = (Double.valueOf(p.getValue()) - Double.valueOf(status.getLastStoredData().getValue()) - accuracyE) / dt;
                    if (nowUp > status.getUpGate())
                        status.setUpGate(nowUp);
                    double nowDown = (Double.valueOf(p.getValue()) - Double.valueOf(status.getLastStoredData().getValue()) + accuracyE) / dt;
                    if (nowDown < status.getDownGate())
                        status.setDownGate(nowDown);
                    if (status.getUpGate() >= status.getDownGate()) { // door closed: store previous point
                        listSDT.add(status.getLastReadData());
                        status.setLastStoredData(status.getLastReadData()); // update last stored point
                        long dt2 = p.getTime() - status.getLastStoredData().getTime();
                        status.setUpGate((Double.valueOf(p.getValue()) - Double.valueOf(status.getLastStoredData().getValue()) - accuracyE) / dt2);
                        status.setDownGate((Double.valueOf(p.getValue()) - Double.valueOf(status.getLastStoredData().getValue()) + accuracyE) / dt2);
                    }
                } else if (dt >= 60 * 60 * 1000L) { // over an hour since last store: force-store
                    listSDT.add(status.getLastReadData());
                    status.setLastStoredData(status.getLastReadData()); // update last stored point
                    long dt2 = p.getTime() - status.getLastStoredData().getTime();
                    status.setUpGate((Double.valueOf(p.getValue()) - Double.valueOf(status.getLastStoredData().getValue()) - accuracyE) / dt2);
                    status.setDownGate((Double.valueOf(p.getValue()) - Double.valueOf(status.getLastStoredData().getValue()) + accuracyE) / dt2);
                }
                // dt <= 0 (same-millisecond sample): skip gate math to avoid divide-by-zero
                status.setLastReadData(p);
                statusMap.put(p.getDevicePointCode(), status);
            } else {
                listSDT.add(p); // non-numeric: store unchanged
            }
        }
        return listSDT;
    }

至此基本满足业务需求。

  • 2
    点赞
  • 12
    收藏
    觉得还不错? 一键收藏
  • 1
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值