【SpringBoot整合InfluxDB 2.1.1教程】

1.安装influxDB 2.X

示例使用docker进行演示。
拉取镜像

docker pull influxdb:2.1.1

启动容器

docker run -d --cap-add SYS_TIME --name influxdb2 --restart always -p 8086:8086 -v /home/influxdb/data:/var/lib/influxdb2 influxdb:2.1.1

-d: 以分离模式运行容器,即在后台运行。
--cap-add SYS_TIME: 向容器添加 SYS_TIME 能力,允许容器内的进程调整系统时钟。这是因为 InfluxDB 可能需要根据主机和容器之间的时间差异来校准时间。
--name influxdb2: 指定容器的名称为 influxdb2。
--restart always: 配置容器在遇到异常停止后总是自动重启。
-p 8086:8086: 映射容器的 8086 端口到宿主机的 8086 端口,这样就可以通过宿主机的 8086 端口访问容器中的服务。
-v /home/influxdb/data:/var/lib/influxdb2: 将宿主机上的 /home/influxdb/data 目录挂载到容器的 /var/lib/influxdb2 目录,这样可以持久化存储数据。 influxdb:2.1.1: 指定要使用的
InfluxDB 镜像版本为 2.1.1。

启动容器后访问对应主机ip+端口8086,初次访问需要对influxdb进行初始化设置,即设置用户名、密码、org等信息
在这里插入图片描述

2.SpringBoot整合InfluxDB

2.1 pom.xml添加Maven依赖

<!--influxdb-->
<dependency>
    <groupId>com.influxdb</groupId>
    <artifactId>influxdb-client-java</artifactId>
    <version>3.1.0</version>
</dependency>
<dependency>
    <groupId>com.influxdb</groupId>
    <artifactId>influxdb-spring</artifactId>
    <version>3.2.0</version>
</dependency>
<!--actuator-autoconfigure不加会报错-->
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-actuator-autoconfigure</artifactId>
</dependency>

2.2 application.yml配置文件添加配置

spring:
  influx:
    url: http://10.10.160.70:8086
    username: admin
    password: 12345678
    token: D9szLjyEwg-XbtMJ9NXgr4TMOUbZV8iApxmdtiFuHpoHlPhemvE50yLuZjBlEX5fxy8sysOLQC4PmCPsX5AIJQ==
    org: gz
    bucket: station
#去除influx报错
management:
  metrics:
    export:
      influx:
        enabled: false

在这里插入图片描述
其中token在下图中查看
在这里插入图片描述
在这里插入图片描述

2.3 InfluxDBClientConfig

package com.gzrobot.baseCommon.config;

import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.InfluxDBClientFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * @author : lpx
 * @Description :
 * @since : 2024/6/20 14:41
 */
/**
 * Spring configuration that exposes a single shared {@link InfluxDBClient}
 * bean, built from the {@code spring.influx.*} properties.
 */
@Configuration
public class InfluxDBClientConfig {

    /** InfluxDB server URL, e.g. http://host:8086. */
    @Value("${spring.influx.url}")
    private String url;

    /** API token used to authenticate against the server. */
    @Value("${spring.influx.token}")
    private String token;

    /** Default organization used by the client. */
    @Value("${spring.influx.org}")
    private String org;

    /** Default bucket used by the client. */
    @Value("${spring.influx.bucket}")
    private String bucket;

    /**
     * Creates the shared client instance. The factory requires the token as a
     * {@code char[]}, hence the conversion.
     *
     * @return a configured {@link InfluxDBClient}
     */
    @Bean
    public InfluxDBClient influxDBClient() {
        char[] tokenChars = token.toCharArray();
        return InfluxDBClientFactory.create(url, tokenChars, org, bucket);
    }

}

2.4 InfluxDBUtil

package com.gzrobot.baseCommon.utils;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.date.TimeInterval;
import cn.hutool.core.map.MapUtil;
import cn.hutool.core.util.ObjectUtil;
import com.beust.jcommander.internal.Lists;
import com.gzrobot.baseCommon.entity.MonitorData;
import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.WriteApi;
import com.influxdb.client.WriteApiBlocking;
import com.influxdb.client.WriteOptions;
import com.influxdb.client.domain.WritePrecision;
import com.influxdb.client.write.Point;
import com.influxdb.query.FluxRecord;
import com.influxdb.query.FluxTable;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.time.OffsetDateTime;
import java.util.*;
import java.util.stream.Collectors;

@Slf4j
@Component
public class InfluxDBUtil {

    @Resource
    private InfluxDBClient influxDBClient;

    // NOTE(review): reportedly this field must not be named "org" because it
    // clashes with the Lombok @Slf4j-generated code — TODO confirm.
    @Value("${spring.influx.org}")
    private String organization;

    /** Default bucket from configuration (was misspelled "bueckt"). */
    @Value("${spring.influx.bucket}")
    private String bucket;

    /** Measurement that all monitor data is written to and read from. */
    private static final String MEASUREMENT = "monitor_data";

    /**
     * Writes points asynchronously through a batching {@link WriteApi}.
     * Failures are logged and swallowed (best-effort write).
     *
     * @param bucket target bucket
     * @param org    target organization
     * @param points points to write; an empty or null list is a no-op
     */
    public void writePoints(String bucket, String org, List<Point> points) {
        if (CollUtil.isEmpty(points)) {
            log.info("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= writePoints 数据为空,不写入!");
            return;
        }
        // Batch up to 5000 points, flush at least once per second,
        // buffer at most 10000 points, retry failed batches after 5s.
        WriteOptions writeOptions = WriteOptions.builder()
                .batchSize(5000)
                .flushInterval(1000)
                .bufferLimit(10000)
                .jitterInterval(0)
                .retryInterval(5000)
                .build();
        // try-with-resources closes the WriteApi, which flushes pending batches.
        try (WriteApi writeApi = influxDBClient.getWriteApi(writeOptions)) {
            log.info("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= influxDB 将写入 {} 个点位", points.size());
            writeApi.writePoints(bucket, org, points);
        } catch (Exception e) {
            log.error("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= influxdb 写入失败!", e);
        }
    }

    /**
     * Writes points synchronously via the blocking write API.
     * Failures are logged and swallowed (best-effort write).
     *
     * @param bucket target bucket
     * @param org    target organization
     * @param points points to write; an empty or null list is a no-op
     */
    public void writePointsBlocking(String bucket, String org, List<Point> points) {
        if (CollUtil.isEmpty(points)) {
            log.info("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= writePointsBlocking 数据为空,不写入!");
            return;
        }
        try {
            WriteApiBlocking writeApi = influxDBClient.getWriteApiBlocking();
            log.info("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= influxDB 将阻塞写入 {} 个点位", points.size());
            writeApi.writePoints(bucket, org, points);
            log.info("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= influxDB 阻塞写入成功 {} 个点位!", points.size());
        } catch (Exception e) {
            log.error("-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= influxdb 阻塞写入失败!", e);
        }
    }

    /**
     * Executes a raw Flux query against the server.
     *
     * @param statement Flux query text
     * @return result tables
     */
    public List<FluxTable> query(String statement) {
        return influxDBClient.getQueryApi().query(statement);
    }

    /**
     * Deletes data in [start, stop] matching the given delete predicate.
     *
     * @param start     inclusive start time
     * @param stop      inclusive stop time
     * @param predicate InfluxDB delete predicate, e.g. {@code _measurement="m"}
     * @param bucket    target bucket
     * @param org       target organization
     */
    public void delete(OffsetDateTime start, OffsetDateTime stop, String predicate, String bucket, String org) {
        influxDBClient.getDeleteApi().delete(start, stop, predicate, bucket, org);
    }


    /**
     * Queries monitor data in [begin, end] with the filters carried by
     * {@code monitorData}, converting each Flux record into a {@link MonitorData}.
     *
     * @param monitorData filter holder (device code, alarm type/level, check state)
     * @param begin       range start
     * @param end         range end
     * @return matching rows, sorted by time ascending
     */
    public List<MonitorData> queryInfluxdb(MonitorData monitorData,
                                           DateTime begin,
                                           DateTime end) {
        String query = buildQuery(DateUtil.date(begin), DateUtil.date(end), monitorData);
        TimeInterval timer = new TimeInterval();
        List<FluxTable> table = query(query);
        log.info("******************** influxDBUtil.查询 执行完成,耗时{} ********************", DateUtil.formatBetween(timer.interval()));
        timer.restart();
        List<MonitorData> monitorDataList = Lists.newArrayList();
        for (FluxTable fluxTable : table) {
            List<FluxRecord> fluxRecords = fluxTable.getRecords();
            for (FluxRecord record : fluxRecords) {
                // Map the record's column values onto the entity.
                Map<String, Object> values = record.getValues();
                MonitorData monitorData1 = new MonitorData();
                monitorData1.setMonitorDeviceCode(MapUtil.getStr(values, "monitor_device_code"));
                monitorData1.setAlarmType(MapUtil.getStr(values, "alarmType"));
                monitorData1.setAlarmLevel(MapUtil.getStr(values, "alarmLevel"));
                monitorData1.setValue(MapUtil.getStr(values, "_value"));
                monitorData1.setTime(DateUtil.toLocalDateTime(MapUtil.getDate(values, "_time")));
                monitorDataList.add(monitorData1);
            }
        }
        log.info("******************** result size:" + monitorDataList.size() + " 遍历influx数据转换完成,耗时{} ********************", DateUtil.formatBetween(timer.interval()));

        return monitorDataList;

    }

    /**
     * Builds a plain (non-paged) query: base query + ascending time sort.
     *
     * @param begin       range start
     * @param end         range end
     * @param monitorData filter holder
     * @return complete Flux query string
     */
    private String buildQuery(DateTime begin, DateTime end, MonitorData monitorData) {
        String query = buildQueryStr(begin, end, monitorData);
        query += " |> sort(columns:[\"_time\"], desc:false)";
        return query;
    }

    /**
     * Builds a paged query pair: [0] the page query (descending time,
     * limit/offset), [1] the total-count query.
     *
     * @param begin       range start
     * @param end         range end
     * @param monitorData filter holder
     * @param pageSize    rows per page
     * @param pageNum     1-based page number
     * @return {page query, count query}
     */
    public String[] buildQuery4page(DateTime begin, DateTime end, MonitorData monitorData, Long pageSize, Long pageNum) {
        String query = buildQueryStr(begin, end, monitorData);
        String count = query + "|> group()" + " |> count()";
        query += " |> sort(columns:[\"_time\"], desc:true)";
        query += " |> group()";
        // (pageNum - 1) instead of --pageNum: same arithmetic without
        // mutating the caller-visible parameter.
        query += " |> limit(n: " + pageSize + ", offset: " + ((pageNum - 1) * pageSize) + ")";
        return new String[]{query, count};
    }

    /**
     * Builds the shared range + filter part of the Flux query.
     * NOTE(review): the -8h offset assumes the input times are UTC+8 (CST) and
     * the server stores UTC — TODO confirm; consider using zoned formatting.
     */
    private String buildQueryStr(DateTime begin, DateTime end, MonitorData monitorData) {
        DateTime beginFormatDate = DateUtil.offsetHour(begin, -8);
        DateTime endFormatDate = DateUtil.offsetHour(end, -8);
        String query = "from(bucket: \"" + bucket + "\") |> range(start: " + DateUtil.format(beginFormatDate, DatePattern.UTC_PATTERN)
                + ", stop: " + DateUtil.format(DateUtil.offsetSecond(endFormatDate, 1), DatePattern.UTC_PATTERN) +
                ") |> filter(fn: (r) => r[\"_measurement\"] == \"" + MEASUREMENT + "\"" + ")";
        String filter = "";
        String monitorDeviceCode = monitorData.getMonitorDeviceCode();
        if (StringUtils.isNotBlank(monitorDeviceCode)) {
            // Multiple device codes arrive comma-separated; OR them together.
            // Delimiter must be " or " (with spaces) — joining with bare "or"
            // produced `== "A"or r[...]`, which is not valid Flux.
            List<String> collect = Arrays.stream(monitorDeviceCode.split(",")).collect(Collectors.toList());
            String filter1 = " |> filter(fn: (r) => " + collect.stream()
                    .map(u -> " r[\"monitor_device_code\"] == \"" + u + "\"")
                    .collect(Collectors.joining(" or ", "", ")"));
            filter += filter1;
        }
        if (StringUtils.isNotBlank(monitorData.getAlarmType())) {
            String filter2 = " |> filter(fn: (r) => r[\"alarmType\"] == \"" + monitorData.getAlarmType() + "\")";
            filter += filter2;
        }
        if (StringUtils.isNotBlank(monitorData.getAlarmLevel())) {
            String filter3 = " |> filter(fn: (r) => r[\"alarmLevel\"] == \"" + monitorData.getAlarmLevel() + "\")";
            filter += filter3;
        }
        if (ObjectUtil.isNotNull(monitorData.getCheckState())) {
            String filter4 = " |> filter(fn: (r) => r[\"checkState\"] == \"" + monitorData.getCheckState() + "\")";
            filter += filter4;
        }
        // Only the "value" field is ever read back.
        String filterValue = "|> filter(fn: (r) => r[\"_field\"] == \"value\")";
        filter += filterValue;
        query += filter;
        return query;
    }

    /**
     * Truncates a timestamp to the start of its hour (yyyy-MM-dd HH:00:00).
     * NOTE(review): not referenced anywhere in this class — candidate for removal.
     */
    private DateTime formatDate(Date collectTime) {
        String formatStartDateStr = DateUtil.formatDate(collectTime);
        return DateUtil.parse(formatStartDateStr + " " + String.format("%02d", DateUtil.hour(collectTime, true)) + ":00:00");
    }


    /**
     * Converts one {@link MonitorData} into a point and writes it to the
     * configured bucket/organization.
     *
     * <p>Tag key fixed to "monitor_device_code" (was "monitorDeviceCode"),
     * matching the key every query in this class filters on — previously
     * saved rows could never be found by {@link #queryInfluxdb}.
     *
     * @param monitorData row to persist; its time/device/alarm fields are read
     */
    public void saveMonitorData2InfluxDB(MonitorData monitorData) {
        List<Point> pointList = new ArrayList<>();
        Point point = Point
                .measurement(MEASUREMENT)
                .time(DateUtil.date(monitorData.getTime()).toInstant(), WritePrecision.NS)
                .addTag("monitor_device_code", monitorData.getMonitorDeviceCode())
                .addTag("alarmType", monitorData.getAlarmType())
                .addTag("alarmLevel", monitorData.getAlarmLevel())
                .addField("value", monitorData.getValue());
        // Guard against NPE: checkState may legitimately be unset.
        if (ObjectUtil.isNotNull(monitorData.getCheckState())) {
            point.addTag("checkState", monitorData.getCheckState().toString());
        }
        pointList.add(point);
        writePoints(bucket, organization, pointList);
    }

}

2.5 测试写入数据

@GetMapping("/testInflux")
    /**
     * Demo endpoint: starting three months back, fans out one asynchronous
     * write task per day (90 days total), throttled to one submission per second.
     *
     * @return an empty success response
     * @throws InterruptedException if the throttling sleep is interrupted
     */
    public R testInflux() throws InterruptedException {
        DateTime startTime = DateUtil.date().offset(DateField.MONTH, -3);
        for (int dayIndex = 0; dayIndex < 90; dayIndex++) {
            final int day = dayIndex; // effectively-final copy for the lambda
            threadPoolExecutor.submit(() -> writePoint(startTime, day));
            Thread.sleep(1000); // throttle: one task per second
        }
        return new R();
    }
    
    /**
     * Builds one day's worth of demo points (one per second, 86 400 total)
     * and writes them through the batching write API.
     *
     * <p>Fix: removed the {@code fieldsMap} that was populated on every
     * iteration but never used (dead code); presized the point list.
     *
     * @param dateTime base date; the generated day is dateTime + index + 1 days
     * @param index    day offset used to spread the generated data over time
     */
    private void writePoint(DateTime dateTime, int index) {
        // Demo payload: JSON string with a random temperature and fixed humidity.
        String valueStr = "[{\"unit\":\"℃\",\"name\":\"温度\",\"value\":\"" + RandomUtil.randomDouble(1, 40,2, RoundingMode.CEILING) + "\"},{\"unit\":\"%\",\"name\":\"湿度\",\"value\":\"95\"}]";
        DateTime dateTime1 = DateUtil.offsetDay(dateTime, index + 1);
        System.out.println("dateTime1 = " + dateTime1);
        Instant instant = dateTime1.toInstant();
        List<Point> pointList = new ArrayList<>(86400);
        for (int i = 0; i < 86400; i++) {
            Point point = Point
                    .measurement(MEASUREMENT)
                    .time(instant.plusSeconds(i), WritePrecision.NS)
                    .addTag("monitor_device_code", "PLC8PBML")
                    .addTag("alarmType", "104")
                    .addTag("alarmLevel", "1")
                    .addTag("checkState", "0")
                    .addField("value", valueStr)
                    .addField("originalValue", valueStr);
            pointList.add(point);
        }
        atomicLong.addAndGet(pointList.size());
        log.info("总计已写入点位 = " + atomicLong.get());
        TimeInterval timer = new TimeInterval();
        influxDBUtil.writePoints(BUCKET, ORZ, pointList);
        log.info("********************influxDBUtil.写入执行完成,耗时{} ********************", DateUtil.formatBetween(timer.interval()));
    }

在这里插入图片描述

2.6 测试查询数据

    /**
     * Queries InfluxDB for monitor data in [begin, end] matching the filters
     * carried by {@code monitorData}, converting every Flux record into a
     * {@link MonitorData} entity.
     *
     * @param monitorData filter holder (device code, alarm type/level, check state)
     * @param begin       range start
     * @param end         range end
     * @return converted rows
     */
    public List<MonitorData> queryInfluxdb(MonitorData monitorData,
                                           DateTime begin,
                                           DateTime end) {
        String flux = buildQuery(DateUtil.date(begin), DateUtil.date(end), monitorData);
        TimeInterval stopwatch = new TimeInterval();
        List<FluxTable> tables = query(flux);
        log.info("******************** influxDBUtil.查询 执行完成,耗时{} ********************", DateUtil.formatBetween(stopwatch.interval()));
        stopwatch.restart();
        List<MonitorData> results = Lists.newArrayList();
        for (FluxTable fluxTable : tables) {
            for (FluxRecord record : fluxTable.getRecords()) {
                // Map the record's column values onto the entity.
                Map<String, Object> values = record.getValues();
                MonitorData item = new MonitorData();
                item.setMonitorDeviceCode(MapUtil.getStr(values, "monitor_device_code"));
                item.setAlarmType(MapUtil.getStr(values, "alarmType"));
                item.setAlarmLevel(MapUtil.getStr(values, "alarmLevel"));
                item.setValue(MapUtil.getStr(values, "_value"));
                item.setTime(DateUtil.toLocalDateTime(MapUtil.getDate(values, "_time")));
                results.add(item);
            }
        }
        log.info("******************** result size:" + results.size() + " 遍历influx数据转换完成,耗时{} ********************", DateUtil.formatBetween(stopwatch.interval()));
        return results;

    }

在这里插入图片描述

通过上面的示例,已经可以整合好influxdb了,且有简单的使用示例。包括保存influxdb数据,查询,分页查询等。
若有疑问,欢迎评论、讨论。
对你有帮助的话记得点赞收藏!

以下是整合Jedis的步骤: 1. 添加Jedis依赖 在 pom.xml 文件中添加 Jedis 的依赖: ``` <dependency> <groupId>redis.clients</groupId> <artifactId>jedis</artifactId> <version>3.0.1</version> </dependency> ``` 2. 配置Jedis连接池 在 Spring Boot 中,可以在 application.properties 文件中配置 Jedis 连接池相关的信息。例如: ``` # Redis spring.redis.host=127.0.0.1 spring.redis.port=6379 spring.redis.password= spring.redis.database=0 # jedis pool config spring.redis.jedis.pool.max-active=8 spring.redis.jedis.pool.max-idle=8 spring.redis.jedis.pool.max-wait=-1ms spring.redis.jedis.pool.min-idle=0 ``` 3. 创建Jedis实例 在需要使用 Jedis 的类中,可以通过以下方式创建 Jedis 实例: ``` @Component public class RedisUtil { @Autowired private JedisPool jedisPool; public Jedis getJedis() { return jedisPool.getResource(); } public void close(Jedis jedis) { if (jedis != null) { jedis.close(); } } } ``` 这里使用了 Spring Boot 的依赖注入机制,通过注入 JedisPool 对象来获取 Jedis 实例。 4. 使用Jedis 通过以上步骤,就可以在项目中使用 Jedis 了,例如: ``` @Autowired private RedisUtil redisUtil; public void set(String key, String value) { Jedis jedis = null; try { jedis = redisUtil.getJedis(); jedis.set(key, value); } finally { redisUtil.close(jedis); } } public String get(String key) { Jedis jedis = null; try { jedis = redisUtil.getJedis(); return jedis.get(key); } finally { redisUtil.close(jedis); } } ``` 以上代码中,通过 RedisUtil 工具类获取 Jedis 实例,并使用该实例进行 Redis 操作。操作完成后,需要手动关闭 Jedis 连接。
评论 3
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值