Python 实现的 Linux 下 ping 测试脚本,运行后生成 ping 日志(附日志处理脚本)。

ping测试用脚本,python2.6+

#!/opt/python2.7.1/bin/python
# -*- coding: utf8 -*-

import subprocess
import re
import time
import os

# Matches ping's summary line, e.g.
#   "100 packets transmitted, 99 received, 1% packet loss, time 99123ms"
# groups: packets transmitted, packets received, loss percentage.
lost_rate_match = re.compile(
    r'(\d+)\spackets transmitted, (\d+)\sreceived,\s(\d+(\.\d+)?%)')

# Matches ping's rtt line, e.g.
#   "rtt min/avg/max/mdev = 0.153/0.318/0.503/0.098 ms"
# NOTE(review): the published copy lost the group names inside (?P...)
# (HTML stripped the angle brackets), which makes re.compile raise;
# restored as min/avg/max/mdev per standard Linux ping output.
time_match = re.compile(
    r'(?P<min>\d+\.\d+)/(?P<avg>\d+\.\d+)/(?P<max>\d+\.\d+)/(?P<mdev>\d+\.\d+)')

# Directory containing this script (sys.path[0]); logs are written below it.
pre_fix = os.sys.path[0]

class PingTest(object):
    """Ping one IP address and record the result.

    By default sends 100 packets with the system default packet size,
    prints nothing to the screen, and appends the formatted result to a
    per-IP log file under <script dir>/logs/.
    """

    def __init__(self, ip, count=100, psize=False, writelog=True):
        """Run the test immediately.

        psize=False means use the system default packet size; with
        writelog=False the result is printed instead of logged.
        """
        # Timestamp taken before the ping starts; embedded in the log line.
        self.record_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
        self._runping(ip, count, psize, writelog)

    def _getping(self, ip, count, psize):
        """Run the ping command via subprocess and return its raw stdout.

        Returns the sentinel 1 when psize is not an int, 2 when it is
        outside the valid 1..65507 range (both consumed by _runping).
        """
        if psize:
            if not isinstance(psize, int):
                return 1
            if not 0 < psize <= 65507:
                return 2
            ping_command = 'ping -c %s -s %s %s' % (count, psize, ip)
        else:
            ping_command = 'ping -c %s %s' % (count, ip)
        ping = subprocess.Popen(ping_command,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                shell=True)
        return ping.stdout.read()

    def _runping(self, ip, count, psize, writelog):
        """Fetch the raw ping output, format it, and log or print it."""
        ping_ori_result = self._getping(ip, count, psize)
        if ping_ori_result == 1:
            # was: 'psize must be type' -- message fixed to say what it means
            print('psize must be an int')
            return 0
        if ping_ori_result == 2:
            # was a hard-coded '65527'; report the size actually requested
            print('packet size %s is too large. Maximum is 65507' % psize)
            return 0
        data_format = self._format(self._getresult(ping_ori_result))
        if writelog:
            if self._writelog(ip, data_format):
                return 1
            return 0
        print(data_format)

    def _writelog(self, ip, result):
        """Append one formatted result line to <script dir>/logs/<ip>.log.

        Returns 1 on success, 0 when the log file cannot be opened.
        """
        filename = '%s/logs/%s.log' % (pre_fix, ip)
        try:
            f = open(filename, 'a')
        except IOError as e:
            print(e)
            return 0
        try:
            f.write('%s\n' % result)
        finally:
            # close even if the write fails, so the descriptor never leaks
            f.close()
        return 1

    def _getresult(self, context):
        """Extract (loss-rate matches, rtt matches) from raw ping output."""
        lost_rate = lost_rate_match.findall(context)
        time_result = time_match.findall(context)
        return (lost_rate, time_result)

    def _format(self, rawdata):
        """Render one log line: '[ts] total: T received: R lost: L% time: ...'."""
        (lostrate, time_result) = rawdata
        if time_result:
            time_context = ' / '.join(time_result[0])
        else:
            # 100% loss: ping prints no rtt summary line at all
            time_context = 'null'
        return '[%s] total: %s received: %s lost: %s time: %s' % (
            self.record_time, lostrate[0][0], lostrate[0][1],
            lostrate[0][2], time_context)

if __name__ == '__main__':
    from multiprocessing import Pool
    import os

    # One IP per line; blank lines are skipped.
    iplist = '%s/ip.list' % pre_fix
    if os.path.exists(iplist):
        with open(iplist, 'r') as f:
            targets = [line.strip() for line in f if line.strip()]
        # One worker per target batch; PingTest runs the ping in __init__.
        pool = Pool(100)
        pool.map(PingTest, targets)
        # Close and join so worker processes are reaped before exit.
        pool.close()
        pool.join()
    else:
        print('Not Found %s' % iplist)

日志处理脚本:使用时需修改日志所在路径以及确认'/tmp/ping_analysis/ip_city_op.txt'的存在(文件格式在原程序中有说明),如遇中文问题请自行参考decode和encode

#!/usr/bin/env python
# -*- coding: utf8 -*-

import re

# Directory holding the per-IP ping logs produced by the ping script.
log_path = '/tmp/ping_analysis/logs_BJ/logs/'

# Aggregated statistics, keyed by 'YYYY-MM-DD HH' (filled by processdata).
result_sets = {}

# Captures the date-plus-hour prefix of a timestamp.
date_format = re.compile(r'(\d{4}-\d{2}-\d{2}\s\d{2}).*')

# Map each IP to 'ip(city/line)'.  Input file is tab separated,
# e.g. '202.96.209.133\t上海\t中国电信' yields
# {'202.96.209.133': '202.96.209.133(上海/中国电信)'}.
# A plain loop instead of map(): map() run for side effects is a no-op on
# Python 3, and this also lets us close the file.
ip_city = {}
with open('/tmp/ping_analysis/ip_city_op.txt') as _f:
    for _line in _f:
        _fields = _line.strip().split('\t')
        ip_city[_fields[0]] = '%s(%s/%s)' % (_fields[0], _fields[1], _fields[2])

def processdata(ori_data):
    """Fold one parsed log record into result_sets, bucketed by hour.

    ori_data is (datetime, lost, min, avg, max) or (datetime, lost, 'null')
    when no rtt data was captured (rtt values then count as 0.0).
    """
    stamp = ori_data[0].strip()
    lost = float(ori_data[1].strip())
    if ori_data[2].strip() == 'null':
        rtt_min = rtt_avg = rtt_max = 0.0
    else:
        rtt_min = float(ori_data[2].strip())
        rtt_avg = float(ori_data[3].strip())
        rtt_max = float(ori_data[4].strip())
    # Bucket key is the 'YYYY-MM-DD HH' prefix of the timestamp.
    key = date_format.match(stamp).group(1)
    bucket = result_sets.get(key)
    if bucket is None:
        result_sets[key] = {'total_lost': lost,
                            'total_min': rtt_min,
                            'total_avg': rtt_avg,
                            'total_max': rtt_max,
                            'count': 1}
    else:
        bucket['total_lost'] += lost
        bucket['total_min'] += rtt_min
        bucket['total_avg'] += rtt_avg
        bucket['total_max'] += rtt_max
        bucket['count'] += 1

def data_format(fp=0):
    """Average the hourly totals accumulated by processdata and write one
    tab-separated row per hour to *fp* (skipped when fp is falsy).

    Row format: \\t<hour>\\t<lost>%\\t<min>\\t<avg>\\t<max>\\n
    """
    # sorted() instead of keys()+list.sort(): dict views have no .sort()
    # on Python 3, and this reads the same on Python 2.
    for hour in sorted(result_sets):
        stats = result_sets[hour]
        count = stats.get('count')
        avg_lost = round(stats.get('total_lost') / count, 4)
        avg_min = round(stats.get('total_min') / count, 4)
        avg_avg = round(stats.get('total_avg') / count, 4)
        avg_max = round(stats.get('total_max') / count, 4)
        if fp:
            # '%%' renders the literal percent sign (was chr(37))
            fp.write('\t%s\t%s%%\t%s\t%s\t%s\n'
                     % (hour, avg_lost, avg_min, avg_avg, avg_max))

def originaldata(items, fp=0):
    """Dump parsed records to *fp* without hourly aggregation.

    Each record is (datetime, lost, min, avg, max) or (datetime, lost,
    'null') when no rtt data exists; 'null' rows get empty min/avg columns.
    Nothing is written when fp is falsy.
    """
    if not fp:
        return
    for rec in items:
        if rec[2] == 'null':
            tail = (rec[2], '', '')
        else:
            tail = (rec[2], rec[3], rec[4])
        # '%%' renders the literal percent sign after the loss value
        fp.write('\t%s\t%s%%\t%s\t%s\t%s\n' % ((rec[0], rec[1]) + tail))

def runprocess(original_data, fp=0, start_date=0, hourly=True):
    """Entry point for one log file's formatted lines.

    Splits each 'datetime,lost,min,avg,max' string into a tuple, optionally
    drops rows before *start_date*, then either aggregates per hour
    (processdata + data_format) or dumps the raw rows (originaldata) to *fp*.
    """

    def middleprocess(data, start):
        # Split the comma-joined records; keep only rows at/after *start*
        # when a start date is given (string comparison on the timestamp).
        rows = [tuple(entry.strip().split(',')) for entry in data]
        if start:
            return [row for row in rows if row[0] >= start]
        return rows

    rows = middleprocess(original_data, start_date)
    if hourly:
        # Explicit loop rather than map(): map() is lazy on Python 3 and
        # would silently skip the aggregation side effects.
        for row in rows:
            processdata(row)
        return data_format(fp)
    originaldata(rows, fp)

def file_format(filename):
    """Read one ping log file and normalise each line for later parsing.

    Strips the [, ], % decorations so every line becomes
    'YYYY-MM-DD HH:MM:SS,lost,min,avg,max' (or '...,lost,null' when the
    log recorded no rtt data).  Returns a list of such strings; blank
    lines are dropped.
    """
    # 'with' so the log file is always closed (the original leaked it).
    with open(filename, 'r') as f:
        tokenised = [line.split() for line in f]
    tokenised = [tokens for tokens in tokenised if tokens]

    def _format_line(tokens):
        # token layout: [date time] total: T received: R lost: L% time: ...
        if tokens[9].strip() == 'null':
            rtt = 'null'
        else:
            rtt = '%s,%s,%s' % (tokens[9], tokens[11], tokens[13])
        return '%s %s,%s,%s' % (tokens[0].strip('[ '), tokens[1].strip('] '),
                                tokens[7].strip('% '), rtt)

    return [_format_line(tokens) for tokens in tokenised]

if __name__ == '__main__':
    import os

    result_log = open('%s%s' % (log_path, 'result_log.xls'), 'w')
    try:
        for dirpath, dirnames, filenames in os.walk(log_path):
            print(dirpath)
            for filename in filenames:
                if not filename.endswith('.log'):
                    continue
                # Slice off the '.log' suffix.  The original used
                # str.strip('.log'), which strips any of '.', 'l', 'o', 'g'
                # from BOTH ends and mangles non-numeric names
                # (e.g. 'google.log' -> 'e').
                ip = filename[:-len('.log')]
                result_log.write('%s\n' % ip_city.get(ip))
                runprocess(file_format('%s%s' % (log_path, filename)),
                           result_log,
                           start_date='2011-03-24 18:00:00',
                           hourly=False)
    finally:
        # Close the report even if a log file fails to parse.
        result_log.close()

对输出的处理后的文件再次进行截断处理:注意日志处理后的文件的路径

# -*- coding: utf8 -*-

import re

import os

# Matches the leading 'YYYY-MM-DD HH' of a timestamped line.
date_format = re.compile(r'(\d{4}-\d{2}-\d{2}\s\d{2}).*')

def cutdata(filename, start=0, end=0, flush=False):
    """Copy the lines of *filename* whose timestamp falls within
    [start, end] into '<filename>_cuted'.

    start/end are 'YYYY-MM-DD HH' strings (falsy means unbounded on that
    side); lines without a recognisable timestamp are always kept.
    flush=True removes a previous output file first (otherwise output is
    appended).  Returns an error string on bad input, else None.
    """
    if not os.path.exists(filename):
        return 'Not Found %s' % filename
    cut_name = '%s_cuted' % filename
    if flush and os.path.exists(cut_name):
        os.remove(cut_name)
    # Only compare when BOTH bounds were given: the original compared a
    # string start against the int default end=0, which made this guard
    # fire on every call with only a start date (and raises on Python 3).
    if start and end and start > end:
        return 'starttime greater than endtime'
    with open(filename, 'r') as src:
        lines = src.readlines()
    kept = []
    for line in lines:
        found = date_format.findall(line)
        if not found:
            # untimestamped line: always keep
            kept.append(line)
            continue
        stamp = found[0]
        if start and stamp < start:
            continue
        if end and stamp > end:
            continue
        kept.append(line)
    # append mode, matching the original behaviour
    with open(cut_name, 'a') as dst:
        dst.writelines(kept)

if __name__ == '__main__':

    # Keep only records timestamped 2011-03-29 18:00 or later in the
    # processed report (no end bound, so everything after is retained).
    cutdata('/root/shell/logs/result.xls',start='2011-03-29 18')

  • 1
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值