Prometheus api 获取监控数据导出 CSV
1、 发送给企业微信机器人
# upload_file 是为了生成 media_id, 供消息使用
# -*- encoding: utf-8 -*-
import requests, paramiko, json, os, datetime
import pandas as pd
from copy import copy
from urllib3 import encode_multipart_formdata
def upload_file(file_path, wx_upload_url):
    """Upload a local file to the WeCom (企业微信) webhook media endpoint.

    :param file_path: local path of the file to upload
    :param wx_upload_url: webhook ``upload_media`` URL (already carries key and type)
    :return: the ``media_id`` a subsequent "file" message must reference
    :raises KeyError: if the response JSON has no ``media_id`` (upload failed)
    """
    file_name = file_path.split("/")[-1]
    with open(file_path, 'rb') as f:
        data = f.read()
    length = os.path.getsize(file_path)
    # Build the multipart body by hand: encode_multipart_formdata returns
    # (body, content_type) where content_type carries the MIME boundary.
    # (The original code first set an octet-stream header that was then
    # immediately overwritten — dead code, removed.)
    file_data = {
        "filename": file_name,
        "filelength": length,
        "file": (file_name, data),
    }
    body, content_type = encode_multipart_formdata(file_data)
    headers = {"Content-Type": content_type}
    r = requests.post(wx_upload_url, data=body, headers=headers)
    print(r.text)
    return r.json()['media_id']
# media_id 通过上一步上传的方法获得
def qi_ye_wei_xin_file(wx_url, media_id):
    """Send a previously-uploaded file into a WeCom group via its webhook robot.

    :param wx_url: webhook ``send`` URL of the robot
    :param media_id: media id returned by :func:`upload_file`
    """
    # The webhook receives a JSON body, so advertise application/json.
    # (The original sent "text/plain", contradicting the payload and
    # overriding the correct header that requests' json= would have set.)
    headers = {"Content-Type": "application/json"}
    payload = {
        "msgtype": "file",
        "file": {
            "media_id": media_id
        }
    }
    r = requests.post(
        url=wx_url,
        headers=headers, json=payload)
    print(r.text)
2、 从Prometheus 上获取监控数据
# Run a command over SSH and capture its output.
def get_file_list(hostname, port, username, key_name, command):
    """Execute *command* on a remote host over SSH and return its stdout as text.

    :param hostname: remote host address
    :param port: SSH port
    :param username: login user
    :param key_name: path to the RSA private key file
    :param command: shell command to execute remotely
    :return: UTF-8-decoded stdout of the command (stderr is discarded)
    """
    ssh = paramiko.SSHClient()
    # Accept hosts missing from known_hosts (convenient, but MITM-prone).
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(hostname=hostname, port=port, username=username,
                pkey=paramiko.RSAKey.from_private_key_file(key_name))
    try:
        stdin, stdout, stderr = ssh.exec_command(command)
        file_list = str(stdout.read(), encoding="utf-8")
    finally:
        # Always release the connection, even if exec_command raises
        # (the original leaked the session on failure).
        ssh.close()
    return file_list
3、将 pandas DataFrame 写入到磁盘(Excel 文件)
def write_to_dish(results, fileDir):
    """Persist a pandas DataFrame to an Excel (.xlsx) file on disk.

    :param results: pandas DataFrame holding the collected metric rows
    :param fileDir: destination path of the .xlsx report file
    """
    # Use the context manager, which saves and closes the workbook on exit:
    # ExcelWriter.save() was deprecated and removed in pandas 2.0.
    with pd.ExcelWriter(fileDir) as writer:
        results.to_excel(writer, float_format='%.5f')
4、定义好需要查询的metric
# Metrics to query; the first entry MUST be 'up'.
def rds_metrics_list():
    """Return the metrics to collect, mapping Prometheus metric name to the
    report column header.

    The ``up`` metric must stay first: the main loop relies on it to create
    one row per instance before the other metrics fill in their columns.
    """
    pairs = [
        ('up', '状态'),
        ('rds001_cpu_util', 'CPU使用率(%)'),
        ('rds002_mem_util', '内存使用率(%)'),
        ('rds003_iops', 'iops'),
        ('rds004_bytes_in', '网络流量入(Bytes)'),
        ('rds005_bytes_out', '网络流量出(Bytes)'),
        ('rds008_qps', 'QPS'),
        ('rds009_tps', 'TPS'),
        ('rds081_vm_ioutils', 'io使用率(%)'),
        ('rds047_disk_total_size', '磁盘总大小(G)'),
        ('rds048_disk_used_size', '磁盘使用(G)'),
        ('mysql_global_status_threads_connected', '当前连接数'),
        ('mysql_global_variables_max_connections', '最大连接数'),
    ]
    return dict(pairs)
5、Prometheus 获取数据落盘数据,并发送给企业微信机器人
if __name__ == '__main__':
    # Prometheus host connection info (curl runs locally on that host over SSH).
    host_info = {'hostname': 'xxx.xxx.xxx.xxx', 'port': 22, 'username': 'root', 'key_name': './keys/xxx'}
    # Directory where the generated inspection reports are written.
    check_file_dir = 'check_files'
    # Hour offset applied to "now" for the query window; the author notes
    # -17 vs. a -8 timezone shift — presumably UTC+8 plus an extra look-back.
    # TODO(review): confirm the intended window.
    curhour = -17  # default -17; current time is -8.1 per original note
    # One entry per Prometheus job to inspect:
    #   name      - report file prefix
    #   value     - collected rows (one dict per instance)
    #   key       - Prometheus job label this entry matches
    #   robot_key - WeCom webhook robot key used to deliver the report
    check_file_dict = {'job': {'name': 'name', 'value': [], 'key': 'xxx',
                               'robot_key': 'xxx'}}
    if not os.path.exists(check_file_dir):
        os.makedirs(check_file_dir)
    metrics_list = rds_metrics_list()
    check_time_day = (datetime.datetime.now() + datetime.timedelta(hours=curhour)).strftime("%Y-%m-%d")
    check_time_hour = (datetime.datetime.now() + datetime.timedelta(hours=curhour)).strftime("%H:%M")
    print(check_time_day, check_time_hour)
    for metric in metrics_list:
        # Query a 15-second range via the Prometheus HTTP API on the remote
        # host; '\&' keeps the remote shell from splitting the unquoted URL.
        command = 'curl http://127.0.0.1:9090/api/v1/query_range?query=%s\&start=%sT%s:00Z\&end=%sT%s:15Z\&step=15s' \
                  % (metric, check_time_day, check_time_hour, check_time_day, check_time_hour)
        file_list = get_file_list(host_info['hostname'], host_info['port'],
                                  host_info['username'], host_info['key_name'], command)
        metrics = json.loads(file_list)
        for key in metrics['data']['result']:
            # MySQL database inspection
            Prometheus_job = key['metric']['job']
            Prometheus_instance = key['metric']['instance']
            for project in check_file_dict:
                # 'up' is queried first and creates one row per instance.
                if Prometheus_job == project and key['metric']['__name__'] == 'up':
                    metric_dict = {}
                    metric_dict['instance'] = key['metric']['instance']
                    metric_dict[metrics_list[metric]] = key['values'][0][1]
                    check_file_dict[key['metric']['job']]['value'].append(metric_dict)
                # Subsequent metrics fill their column into the existing rows.
                for instance in check_file_dict[project]['value']:
                    if key['metric']['job'] == check_file_dict[project]['key'] and \
                            key['metric']['instance'] == instance['instance']:
                        instance[metrics_list[metric]] = key['values'][0][1]
    for key in check_file_dict:
        test_report = './' + check_file_dir + '/' + check_file_dict[key]['name'] + '_' + check_time_day + '.xlsx'
        print(test_report)
        # Overwrite any report left over from an earlier run today.
        if os.path.exists(test_report):
            os.remove(test_report)
        print(check_file_dict[key]['value'])
        write_to_dish(pd.DataFrame(check_file_dict[key]['value']),
                      fileDir=test_report)
        # Deliver the report through the WeCom robot.
        wx_api_key = check_file_dict[key]['robot_key']  # your own robot key goes here
        wx_upload_url = "https://qyapi.weixin.qq.com/cgi-bin/webhook/upload_media?key={}&type=file".format(wx_api_key)
        wx_url = 'https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key={}'.format(wx_api_key)
        media_id = upload_file(test_report, wx_upload_url)
        qi_ye_wei_xin_file(wx_url, media_id)
6、最终群组里面收到机器巡检文件