背景
上文中，已经可以获取到Prometheus接口的数据并保存到本地的csv文件中。为了方便数据的保存，接下来直接一步到位，把数据存入MySQL数据库。
实现
# -*- coding: UTF-8 -*-
import csv
import time
import requests
import MySQLdb
import MySQLdb.cursors
import numpy as np
def convert_time_stamp(tt):
    """Convert a local-time string "YYYY-MM-DD HH:MM:SS" into an integer unix timestamp."""
    parsed = time.strptime(tt, "%Y-%m-%d %H:%M:%S")  # string -> struct_time
    return int(time.mktime(parsed))  # struct_time (local tz) -> epoch seconds
def convert_time_format(tt):
    """Convert a 10-digit unix timestamp to a local-time "YYYY-MM-DD HH:MM:SS" string.

    Passes None straight through so callers can chain it after a failed lookup.
    """
    if tt is None:
        return None
    return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(tt)))
def clear_csv(file):
    """Truncate *file* to zero length, keeping the file itself in place."""
    with open(file, "r+") as handle:
        handle.truncate(0)
def get_data(sql):
    """Run a Prometheus range query and return the peak of the first result series.

    Reads the module-level globals ``start_time`` / ``end_time`` for the query
    window (set in the ``__main__`` block).

    Args:
        sql: a PromQL expression.

    Returns:
        (max_value, point_time): peak sample value rounded to 2 decimals and
        the unix timestamp at which it occurred, or (None, None) when the
        query returned no series or the request failed.
    """
    url = (f'http://xxx.xx.xx.xx:9090/api/v1/query_range?query={sql}'
           f'&start={convert_time_stamp(start_time)}'
           f'&end={convert_time_stamp(end_time)}&step=120&timeout=40s')
    res = None  # defined before try so the except block can inspect it safely
    try:
        res = requests.get(url=url)
        result = res.json()["data"]["result"]
        if not result:
            return None, None
        vmax = None
        point_time = None
        for ts, raw in result[0]["values"]:
            value = float(raw)
            # ">=" keeps the latest timestamp among equal peaks, matching the
            # original behavior; starting from None (not 0) also handles
            # all-negative series without leaving point_time unbound.
            if vmax is None or value >= vmax:
                vmax = value
                point_time = ts
        return round(vmax, 2), point_time
    except Exception as err:
        print("捕捉异常: {}".format(err))
        print("请求url: {}".format(url))
        if res is not None:
            # Use .text, not .json(): a non-JSON error page must not raise
            # a second exception inside the handler.
            print("请求返回: {}".format(res.text))
        # Callers unpack two values; the original fell through and returned a
        # bare None here, which crashed the caller on tuple unpacking.
        return None, None
def get_data_with_point_time(sql, point_time):
    """Run a Prometheus range query and return the first series' value at ``point_time``.

    Reads the module-level globals ``start_time`` / ``end_time`` for the query
    window. NaN samples are reported as 0.0.

    Args:
        sql: a PromQL expression.
        point_time: unix timestamp to look up (typically the peak timestamp
            returned by ``get_data``).

    Returns:
        The sample value rounded to 2 decimals, 0.0 for NaN, or None when the
        series is empty, no sample matches ``point_time``, or the request failed.
    """
    url = (f'http://xxx.xx.xx.xx:9090/api/v1/query_range?query={sql}'
           f'&start={convert_time_stamp(start_time)}'
           f'&end={convert_time_stamp(end_time)}&step=120&timeout=40s')
    res = None  # defined before try so the except block can inspect it safely
    try:
        res = requests.get(url=url)
        result = res.json()["data"]["result"]
        if not result:
            return None
        for ts, raw in result[0]["values"]:
            if ts == point_time:
                rt = float(raw)
                return 0.0 if np.isnan(rt) else round(rt, 2)
        # Explicit None when no sample matches (the original fell off the end).
        return None
    except Exception as err:
        print("捕捉异常: {}".format(err))
        print("请求url: {}".format(url))
        if res is not None:
            # Use .text, not .json(): a non-JSON error page must not raise
            # a second exception inside the handler.
            print("请求返回: {}".format(res.text))
        return None
def collect_flow_data_to_mysql(duration):
    """Collect per-interface peak metrics from Prometheus and insert them into MySQL.

    Reads interface definitions from ``interfacelist.csv`` (one row per
    interface: job name, "ip|ip|..." instance list, path regex), queries peak
    RPS plus the latency/CPU values at the peak timestamp, and inserts one row
    per interface into ``report_flow_data``.

    Args:
        duration: human-readable query time range stored with each row.
    """
    # Open the database connection; close it in finally so a failed row does
    # not leak the connection (the original skipped close() on any exception).
    db = MySQLdb.connect("localhost", "app", "123456", "Big_Promotion_Flow")
    try:
        cursor = db.cursor()  # one cursor for the whole run, not one per row
        try:
            with open('interfacelist.csv', encoding='utf-8') as rf:
                for val in csv.reader(rf):
                    job, instances, path = val[0], val[1], val[2]
                    rps_sql = 'sum(rate(al_meter_request_counter_total{job="' + job + '", instance=~"(' + instances + ')", path=~"' + path + '",code=~".*"}[2m]))'
                    p50rt_sql = 'sum(increase(al_meter_request_summary_sum{job="' + job + '",path=~"' + path + '"}[2m])) by (path)/sum(increase(al_meter_request_summary_count{job="' + job + '",path=~"' + path + '"}[2m])) by (path)'
                    p99rt_sql = 'max(al_meter_request_summary{job="' + job + '", instance=~"(' + instances + ')", path=~"' + path + '",quantile="0.99"}) by(instance,path)'
                    # NOTE(review): the literal '+' characters below look like
                    # URL-encoded spaces pasted from a browser query string;
                    # kept byte-identical — confirm against the Prometheus
                    # endpoint before normalizing them to spaces.
                    maxrt_sql = 'max(al_meter_request_summary_max{job="' + job + '",+instance=~"(' + instances + ')",+path=~"' + path + '"})+by+(path)'
                    cpu_sql = '1 - avg(irate(node_cpu_seconds_total{instance=~"' + instances.replace("|", ":9100|") + ':9100' + '",mode="idle"}[30m])) by (instance)'
                    rps, point_time = get_data(rps_sql)
                    num = str(instances).count('|') + 1  # host count from "ip|ip|..."
                    p50rt = get_data_with_point_time(p50rt_sql, point_time)
                    p99rt = get_data_with_point_time(p99rt_sql, point_time)
                    maxrt = get_data_with_point_time(maxrt_sql, point_time)
                    cpu_r = get_data(cpu_sql)[0]
                    # Guard: the CPU query may yield no data; the original
                    # crashed on None * 100 here. Store NULL instead.
                    cpu = None if cpu_r is None else "%.2f%%" % (cpu_r * 100)
                    sql = "INSERT INTO report_flow_data(service, host_num, ip, path, rps, average_response_time, p99_response_time, max_response_time, CPU, point_time, query_time_range) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
                    # Parameterized execute (keeps values out of the SQL string).
                    cursor.execute(sql, (job, num, instances, path, rps, p50rt, p99rt, maxrt, cpu, convert_time_format(point_time), duration))
                    db.commit()  # commit each row so a later failure keeps earlier data
        finally:
            cursor.close()
    finally:
        db.close()
if __name__ == '__main__':
    # Query window; read as module-level globals by get_data /
    # get_data_with_point_time.
    start_time = '2022-11-11 16:00:00'
    end_time = '2022-11-11 23:59:59'
    duration = '{} - {}'.format(start_time, end_time)
    collect_flow_data_to_mysql(duration)
如果想进一步在Grafana面板上展示数据,可以看以下文章,功能已经实现。
grafana展示mysql源数据:表格展示
grafana展示mysql表数据:生成折线图