Testing Data Insert Efficiency with the InfluxDB Java Client

To get a quick test going, the InfluxDB utility class below is adapted from a wrapper shared online, with a few small optimizations. Reference: https://blog.csdn.net/x541211190/article/details/83216589?depth_1-utm_source=distribute.pc_relevant.none-task-blog-BlogCommendFromBaidu-4&utm_source=distribute.pc_relevant.none-task-blog-BlogCommendFromBaidu-4

Contents

InfluxDB Connection Utility

Main Test Class

Insert Test and Simple Load Test


InfluxDB Connection Utility

package com.xxx.dataservice.xhtdataservice.util;

import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.influxdb.dto.Pong;
import org.influxdb.dto.Query;
import org.influxdb.dto.QueryResult;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
 * InfluxDB database connection helper.
 */
public class InfluxDBConnection {

    // username
    private String username;
    // password
    private String password;
    // connection URL
    private String openurl;
    // database name
    private String database;
    // retention policy
    private String retentionPolicy;
    // InfluxDB client instance
    private InfluxDB influxDB;

    /**
     * Constructor.
     * @param username
     * @param password
     * @param openurl
     * @param database
     * @param retentionPolicy  falls back to "autogen" when null or empty
     */
    public InfluxDBConnection(String username, String password, String openurl, String database,
                              String retentionPolicy) {
        this.username = username;
        this.password = password;
        this.openurl = openurl;
        this.database = database;
        this.retentionPolicy = retentionPolicy == null || retentionPolicy.equals("") ? "autogen" : retentionPolicy;
        influxDbBuild();
    }

    /**
     * Create a database if it does not exist.
     *
     * @param dbName
     */
    @SuppressWarnings("deprecation")
    public void createDB(String dbName) {
       if(!influxDB.databaseExists(dbName)){
           influxDB.createDatabase(dbName);
       }
    }

    /**
     * Drop a database if it exists.
     *
     * @param dbName
     */
    @SuppressWarnings("deprecation")
    public void deleteDB(String dbName) {
        if(influxDB.databaseExists(dbName)){
            influxDB.deleteDatabase(dbName);
        }
    }

    /**
     * Check whether the connection is alive.
     *
     * @return true if the server responds to ping
     */
    public boolean ping() {
        boolean isConnected = false;
        Pong pong;
        try {
            pong = influxDB.ping();
            if (pong != null) {
                isConnected = true;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return isConnected;
    }

    /**
     * Connect to the time-series database (the create-if-missing step is left commented out below).
     *
     * @return
     */
    public InfluxDB influxDbBuild() {
        if (influxDB == null) {
            influxDB = InfluxDBFactory.connect(openurl, username, password);
        }
        try {
            // if (!influxDB.databaseExists(database)) {
            // influxDB.createDatabase(database);
            // }
        } catch (Exception e) {
            // The server may sit behind a proxy that does not allow creating databases.
            // e.printStackTrace();
        } finally {
            influxDB.setRetentionPolicy(retentionPolicy);
        }
        influxDB.setLogLevel(InfluxDB.LogLevel.NONE);
        return influxDB;
    }

    /**
     * Create a custom retention policy.
     *
     * @param policyName
     *            policy name
     * @param duration
     *            retention duration (e.g. "8h", "30d")
     * @param replication
     *            replication factor
     * @param isDefault
     *            whether to make this the default retention policy
     */
    public void createRetentionPolicy(String policyName, String duration, int replication, Boolean isDefault) {
        String sql = String.format("CREATE RETENTION POLICY \"%s\" ON \"%s\" DURATION %s REPLICATION %s ", policyName,
                database, duration, replication);
        if (isDefault) {
            sql = sql + " DEFAULT";
        }
        this.query(sql);
    }

    /**
     * Create the default retention policy.
     *
     * @note policy name: default, duration: 30 days, replication factor: 1,
     *            set as the default retention policy
     */
    public void createDefaultRetentionPolicy() {
        String command = String.format("CREATE RETENTION POLICY \"%s\" ON \"%s\" DURATION %s REPLICATION %s DEFAULT",
                "default", database, "30d", 1);
        this.query(command);
    }

    /**
     * Query.
     *
     * @param command
     *            the query statement
     * @return
     */
    public QueryResult query(String command) {
        return influxDB.query(new Query(command, database));
    }

    /**
     * Insert a single point.
     *
     * @param measurement
     *            measurement (table)
     * @param tags
     *            tags
     * @param fields
     *            fields
     */
    public void insert(String measurement, Map<String, String> tags, Map<String, Object> fields, long time,
                       TimeUnit timeUnit) {
        Point.Builder builder = Point.measurement(measurement);
        builder.tag(tags);
        builder.fields(fields);
        if (0 != time) {
            builder.time(time, timeUnit);
        }
        influxDB.write(database, retentionPolicy, builder.build());
    }

    /**
     * Batch-write points.
     *
     * @param batchPoints
     */
    public void batchInsert(BatchPoints batchPoints) {
        influxDB.write(batchPoints);
        // influxDB.enableGzip();
        // influxDB.enableBatch(2000,100,TimeUnit.MILLISECONDS);
        // influxDB.disableGzip();
        // influxDB.disableBatch();
    }

    /**
     * Batch-write line protocol records.
     *
     * @param database
     *            database name
     * @param retentionPolicy
     *            retention policy
     * @param consistency
     *            consistency level
     * @param records
     *            records to write (each record can be obtained from BatchPoints.lineProtocol())
     */
    public void batchInsert(final String database, final String retentionPolicy, final InfluxDB.ConsistencyLevel consistency,
                            final List<String> records) {
        influxDB.write(database, retentionPolicy, consistency, records);
    }

    /**
     * Delete.
     *
     * @param command
     *            the delete statement
     * @return the error message, if any
     */
    public String deleteMeasurementData(String command) {
        QueryResult result = influxDB.query(new Query(command, database));
        return result.getError();
    }

    /**
     * Close the connection.
     */
    public void close() {
        influxDB.close();
    }

    /**
     * Build a Point.
     *
     * @param measurement
     * @param time
     * @param tags
     * @param fields
     * @return
     */
    public Point pointBuilder(String measurement, long time, Map<String, String> tags, Map<String, Object> fields) {
        Point point = Point.measurement(measurement).time(time, TimeUnit.MILLISECONDS).tag(tags).fields(fields).build();
        return point;
    }

}
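
The commented-out enableGzip()/enableBatch() calls inside batchInsert() hint at the batching support built into influxdb-java itself. For comparison, here is a minimal sketch (not part of the original utility; the endpoint and credentials are the same placeholders used in the tests below) that turns on the client's asynchronous batching and gzip compression:

import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.Point;
import java.util.concurrent.TimeUnit;

public class ClientSideBatchSketch {

    public static void main(String[] args) {
        // Connect with the same placeholder endpoint and credentials as the tests below.
        InfluxDB influxDB = InfluxDBFactory.connect("http://192.168.1.111:8086", "admin", "admin");
        influxDB.enableGzip();                                  // compress HTTP request bodies
        influxDB.enableBatch(2000, 100, TimeUnit.MILLISECONDS); // flush every 2000 points or every 100 ms

        Point point = Point.measurement("distance")
                .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
                .tag("user_distance", "标签值")
                .addField("field2", Math.random())
                .build();

        // With batching enabled, this call only buffers the point; the client
        // flushes it to the server asynchronously in the background.
        influxDB.write("db-test", "hour", point);

        // close() flushes any remaining buffered points and releases resources.
        influxDB.close();
    }
}

When throughput matters more than per-point latency, this lets the wrapper keep its simple insert() signature while still benefiting from batched HTTP writes.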

Main Test Class

package com.xxx.dataservice.xhtdataservice;

import com.xxx.dataservice.xhtdataservice.util.InfluxDBConnection;
import org.influxdb.InfluxDB;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.influxdb.dto.QueryResult;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

public class InfluxDBTest {

    private Executor executor = Executors.newFixedThreadPool(20);

    private static final InfluxDBConnection influxDBConnection = new InfluxDBConnection("admin", "admin", "http://192.168.1.111:8086", "db-test", "hour");

    static {
        // Create the database if it does not exist
        influxDBConnection.createDB("db-test");
        // Create the retention policy
        // influxDBConnection.createDefaultRetentionPolicy();
        influxDBConnection.createRetentionPolicy("hour","8h",1,false);
    }

    /**
     * Entry point.
     *
     * @param args
     */
    public static void main(String[] args) {
        InfluxDBTest test = new InfluxDBTest();
        test.insertBatchTest();
//        test.insertTest();
//        test.batchInsertTest();
//        test.batchInsertTest2();
//        test.queryTest();
    }

    /**
     * Sequential insert loop (simple throughput test).
     */
    public void insertBatchTest() {
        int thread = 10000;
        System.out.println("Starting sequential inserts.......");
        long start = System.currentTimeMillis();
        for (int i = 0; i < thread; i++) {
            insertTest();
            System.out.println("Insert " + i + ".......done");
        }
        long end = System.currentTimeMillis();
        System.out.println("Inserted " + thread + " points.......elapsed: " + (end - start));
    }

    /**
     * Thread-pool insert (only measures task submission time).
     */
    public void insertByThreadTest() {
        int thread = 10000;
        System.out.println("Starting threaded inserts.......");
        long start = System.currentTimeMillis();
        for (int i = 0; i < thread; i++) {
            final int s = i;
            executor.execute(() -> {
                insertTest();
                System.out.println("Insert " + s + ".......done");
            });
        }
        long end = System.currentTimeMillis();
        System.out.println("Submitted " + thread + " inserts to the pool.......elapsed: " + (end - start));
    }


    /**
     * Single-point insert.
     */
    public void insertTest() {
        long start = System.currentTimeMillis();
        Map<String, String> tags = new HashMap<>();
        tags.put("user_distance", "标签值");
        Map<String, Object> fields = new HashMap<>();
        fields.put("field1", "String类型");
        // Numeric field: in InfluxDB, a field's type is fixed by the first value ever written to it
        fields.put("field2", new Random().nextDouble());
        // Timestamps are in milliseconds
        influxDBConnection.insert("distance", tags, fields, System.currentTimeMillis(), TimeUnit.MILLISECONDS);
        long end = System.currentTimeMillis();
        System.out.println("Insert.......elapsed: " + (end - start));
    }

    /**
     * Query test.
     */
    public void queryTest() {
        QueryResult results = influxDBConnection
                .query("SELECT * FROM measurement where name = '大脑补丁'  order by time desc limit 1000");
        // results.getResults() holds one entry per SQL statement; we issued a single
        // statement, so only the first result set is needed.
        QueryResult.Result oneResult = results.getResults().get(0);
        if (oneResult.getSeries() != null) {
            List<List<Object>> valueList = oneResult.getSeries().stream().map(QueryResult.Series::getValues)
                    .collect(Collectors.toList()).get(0);
            if (valueList != null && valueList.size() > 0) {
                for (List<Object> value : valueList) {
                    // Values are positional and follow Series.getColumns();
                    // the first column returned by InfluxDB is normally "time".
                    String field1 = value.get(0) == null ? null : value.get(0).toString();
                    String field2 = value.get(1) == null ? null : value.get(1).toString();
                    // TODO use the extracted values in your own business logic ...
                }
            }
        }
    }


    /**
     * Batch insert via BatchPoints.
     */
    public void batchInsertTest(){
        Map<String, String> tags = new HashMap<>();
        tags.put("tag1", "标签值");
        Map<String, Object> fields1 = new HashMap<>();
        fields1.put("field1", "abc");
        // Numeric field: in InfluxDB, a field's type is fixed by the first value ever written to it
        fields1.put("field2", 123456);

        Map<String, Object> fields2 = new HashMap<>();
        fields2.put("field1", "String类型");
        fields2.put("field2", 3.141592657);
        // Build one point per record
        Point point1 = influxDBConnection.pointBuilder("user_tb_1", System.currentTimeMillis(), tags, fields1);
        Point point2 = influxDBConnection.pointBuilder("user_tb_2", System.currentTimeMillis(), tags, fields2);
        // Build the BatchPoints containers and add the two points
        BatchPoints batchPoints1 = BatchPoints.database("db-test").tag("tag1", "标签值1").retentionPolicy("hour")
                .consistency(InfluxDB.ConsistencyLevel.ALL).build();
        BatchPoints batchPoints2 = BatchPoints.database("db-test").tag("tag2", "标签值2").retentionPolicy("hour")
                .consistency(InfluxDB.ConsistencyLevel.ALL).build();
        batchPoints1.point(point1);
        batchPoints2.point(point2);
        // Write both batches to the database
        influxDBConnection.batchInsert(batchPoints1);
        influxDBConnection.batchInsert(batchPoints2);
    }

    /**
     * Batch insert via serialized line protocol.
     */
    public void batchInsertTest2(){
        Map<String, String> tags1 = new HashMap<String, String>();
        tags1.put("tag1", "标签值");
        Map<String, String> tags2 = new HashMap<String, String>();
        tags2.put("tag2", "标签值");

        Map<String, Object> fields1 = new HashMap<String, Object>();
        fields1.put("field1", "abc");
        // Numeric field: in InfluxDB, a field's type is fixed by the first value ever written to it
        fields1.put("field2", 123456);

        Map<String, Object> fields2 = new HashMap<String, Object>();
        fields2.put("field1", "String类型");
        fields2.put("field2", 3.141592657);
        // Build one point per record
        Point point1 = influxDBConnection.pointBuilder("user_tb_1", System.currentTimeMillis(), tags1, fields1);
        Point point2 = influxDBConnection.pointBuilder("user_tb_2", System.currentTimeMillis(), tags2, fields2);
        BatchPoints batchPoints1 = BatchPoints.database("db-test").tag("tag1", "标签值1")
                .retentionPolicy("hour").consistency(InfluxDB.ConsistencyLevel.ALL).build();
        // Add the first point to its BatchPoints
        batchPoints1.point(point1);
        BatchPoints batchPoints2 = BatchPoints.database("db-test").tag("tag2", "标签值2")
                .retentionPolicy("hour").consistency(InfluxDB.ConsistencyLevel.ALL).build();
        // Add the second point to its BatchPoints
        batchPoints2.point(point2);
        // Serialize the different BatchPoints to line protocol and write them in a single request to improve write speed
        List<String> records = new ArrayList<String>();
        records.add(batchPoints1.lineProtocol());
        records.add(batchPoints2.lineProtocol());
        // Write both records to the database in one batch
        influxDBConnection.batchInsert("db-test", "hour", InfluxDB.ConsistencyLevel.ALL, records);
    }

}
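
Note that insertByThreadTest() above only measures how long it takes to submit the tasks to the pool, not how long the inserts themselves take. Below is a minimal sketch of one way to measure actual completion time, using an ExecutorService together with a CountDownLatch; this method is an addition (not part of the original test class) and assumes java.util.concurrent.ExecutorService and java.util.concurrent.CountDownLatch are imported:

    /**
     * Thread-pool insert that waits for all tasks to finish before reporting the elapsed time.
     */
    public void insertByThreadAndAwaitTest() throws InterruptedException {
        int total = 10000;
        ExecutorService pool = Executors.newFixedThreadPool(20);
        CountDownLatch latch = new CountDownLatch(total);
        long start = System.currentTimeMillis();
        for (int i = 0; i < total; i++) {
            pool.execute(() -> {
                try {
                    insertTest();          // same single-point insert as above
                } finally {
                    latch.countDown();     // count every task, even if it throws
                }
            });
        }
        latch.await();                     // block until all inserts have completed
        pool.shutdown();
        long end = System.currentTimeMillis();
        System.out.println("Inserted " + total + " points.......elapsed: " + (end - start));
    }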

Insert Test and Simple Load Test

InfluxDB insert performance is quite good: individual inserts generally stay in the 25 ms to 50 ms range (measured over a sample of 10,000 inserts; with the per-insert console printing included, these timings are reasonable).

A simple load test can be built on top of this code, for example by exposing the insert through a REST controller:

@RestController
@RequestMapping("/influxdb")
public class InfluxDBController {

    private static final InfluxDBConnection influxDBConnection = new InfluxDBConnection("admin", "admin", "http://192.168.1.111:8086", "db-test", "hour");

    @RequestMapping("/saveBySingleton")
    @ResponseBody
    public Map<String, Object> saveBySingleton(){
        Map<String, String> tags = new HashMap<>();
        tags.put("user_distance", "标签值");
        Map<String, Object> fields = new HashMap<>();
        fields.put("field1", "String类型");
        // Numeric field: in InfluxDB, a field's type is fixed by the first value ever written to it
        fields.put("field2", new Random().nextDouble());
        // Timestamps are in milliseconds
        influxDBConnection.insert("distance", tags, fields, System.currentTimeMillis(), TimeUnit.MILLISECONDS);
        return fields;
    }

    @RequestMapping("/saveByStandard")
    @ResponseBody
    public Map<String, Object> saveByStandard(){
        InfluxDBConnection influxDBConnection = new InfluxDBConnection("admin", "admin", "http://192.168.1.111:8086", "db-test", "hour");
        Map<String, String> tags = new HashMap<>();
        tags.put("user_distance", "标签值");
        Map<String, Object> fields = new HashMap<>();
        fields.put("field1", "String类型");
        // Numeric field: in InfluxDB, a field's type is fixed by the first value ever written to it
        fields.put("field2", new Random().nextDouble());
        // Timestamps are in milliseconds
        influxDBConnection.insert("distance", tags, fields, System.currentTimeMillis(), TimeUnit.MILLISECONDS);
        influxDBConnection.close();
        return fields;
    }
}

Results of the 60-second concurrency test:

The results show that at around 10,000 concurrent requests, how the connection is managed noticeably affects stability, and the singleton connection is roughly twice as efficient as the traditional open-use-close-per-request approach.
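
Given that result, one way to keep a single shared connection in a Spring application is to let the container manage it as a singleton bean instead of constructing it per request or holding a static field in the controller. A minimal sketch (the configuration class and bean name below are assumptions, not part of the original post):

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class InfluxDBConfig {

    /**
     * One shared InfluxDBConnection for the whole application;
     * Spring calls close() when the context shuts down.
     */
    @Bean(destroyMethod = "close")
    public InfluxDBConnection influxDBConnection() {
        // Same placeholder endpoint and credentials as in the controller above.
        return new InfluxDBConnection("admin", "admin", "http://192.168.1.111:8086", "db-test", "hour");
    }
}

The controller can then inject this bean (for example via constructor injection) instead of creating a connection inside each handler.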
