最近某項目忽然提出一個新需求:須要每週五14點,獲取他們監控項目天天20-24點的監控平均數據(以小時爲單位),輸出文件是excel,要求天天單獨一個sheet,彙總後郵件轉給業務。
他們主要是作業務使用量報表,每週週報使用。雖然需求困難,但做爲運維也得解決,下面是郵件的效果圖。
能夠看到郵件標題帶有項目名稱與時間,收件人是業務與我。
下面是excel的格式git
天天一個sheet,獲取這些項目天天20-24點的監控平均數據,以小時爲單位。
主要是使用sql查詢上面的監控數據,並經過python把數據彙總到excel裏,再使用crontab定時發送。
重要:我這裏對默認的linux監控模板添加了一個監控主機cpu數量的item,key名稱是system.cpu.num,請你們也加入到模板裏,不然導出的excel裏是空的。
下面是腳本內容,你們改爲本身須要獲取的項目組與發送郵箱信息就行(我是使用163郵箱)。
#!/usr/bin/env python
# coding=utf-8
# Author: Denglei
# Email: dl528888@gmail.com
# QQ: 244979152
#
# Weekly Zabbix usage report (Python 2).
#
# Pulls per-host averages for the 20:00-24:00 window of each of the
# last 7 days out of the Zabbix MySQL trend tables, writes one Excel
# sheet per day, then mails the workbook as an attachment.
import MySQLdb
import datetime
import xlwt
import sys
import os
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email.utils import COMMASPACE, formatdate
from email import encoders


def send_mail(server, fro, to, subject, text, files=None):
    """Send *text* with *files* attached through SMTP+STARTTLS.

    server -- dict with keys 'name', 'port', 'user', 'passwd'
    fro    -- sender address
    to     -- list of recipient addresses
    files  -- list of file paths to attach (default: no attachments)
    """
    if files is None:  # avoid the shared mutable-default-argument trap
        files = []
    assert type(server) == dict
    assert type(to) == list
    assert type(files) == list

    msg = MIMEMultipart()
    msg['From'] = fro
    msg['Subject'] = subject
    msg['To'] = COMMASPACE.join(to)  # COMMASPACE == ', '
    msg['Date'] = formatdate(localtime=True)
    msg.attach(MIMEText(text))

    for path in files:
        part = MIMEBase('application', 'octet-stream')  # binary data
        fh = open(path, 'rb')
        try:
            part.set_payload(fh.read())
        finally:
            fh.close()  # don't leak the file handle
        encoders.encode_base64(part)
        # BUG FIX: the disposition type must be 'attachment' (RFC 2183).
        # The original value 'p_w_upload' was mangled text, which makes
        # mail clients not recognise the file as an attachment.
        part.add_header('Content-Disposition',
                        'attachment; filename="%s"' % os.path.basename(path))
        msg.attach(part)

    smtp = smtplib.SMTP(server['name'], server['port'])
    try:
        smtp.ehlo()
        smtp.starttls()
        smtp.ehlo()
        smtp.login(server['user'], server['passwd'])
        smtp.sendmail(fro, to, msg.as_string())
    finally:
        smtp.quit()  # always terminate the session, even on failure


def get_mysql_data(sql):
    """Execute *sql* on the module-level cursor and return all rows."""
    cur.execute(sql)
    return cur.fetchall()


def cover_excel(msg, start_time):
    """Write one day's merged rows into a new sheet named *start_time*.

    *msg* is a list of 9-column rows built by run_select(); a header
    row is inserted in front before writing.  Mutates *msg* in place
    (same as the original).
    """
    ws = wb.add_sheet(start_time, cell_overwrite_ok=True)
    title = ['時間'.encode('utf8'), '所屬組'.encode('utf8'),
             '主機IP'.encode('utf8'), 'CPU邏輯核數(單位:個)'.encode('utf8'),
             'CPU空閒值(單位:%)'.encode('utf8'),
             '可用內存值(單位:GB)'.encode('utf8'),
             '總內存值(單位:GB)'.encode('utf8'),
             '公網進入流量(單位:kbps)'.encode('utf8'),
             '公網流出流量(單位:kbps)'.encode('utf8')]
    msg.insert(0, title)
    # BUG FIX: the original cached len(msg) BEFORE inserting the header
    # row and looped over that stale count, silently dropping the last
    # data row of every sheet.  Enumerate the final list instead.
    for row_idx, row in enumerate(msg):
        for col_idx in range(9):
            value = row[col_idx]
            if isinstance(value, (int, long, float)):
                ws.write(row_idx, col_idx, value)
            else:
                ws.write(row_idx, col_idx, value.decode('utf8'))


# Shared SELECT template: per-host trend averages for one item key,
# limited to the 20:00-24:00 window of one day and to host groups whose
# name contains 廣告.  %s placeholders receive (start_time, end_time);
# {table}/{key}/{value}/{alias} are filled per metric.
_SQL_TMPL = (
    "select from_unixtime(hi.clock,'%%Y-%%m-%%d %%T') as Date,"
    "g.name as Group_Name,h.host as Host,{value} as {alias} "
    "from hosts_groups hg "
    "join groups g on g.groupid = hg.groupid "
    "join items i on hg.hostid = i.hostid "
    "join hosts h on h.hostid=i.hostid "
    "join {table} hi on i.itemid = hi.itemid "
    "where i.key_='{key}' "
    "and hi.clock >= UNIX_TIMESTAMP('%s 20:00:00') "
    "and hi.clock < UNIX_TIMESTAMP('%s 00:00:00') "
    "and g.name like '%%廣告%%' group by h.host;"
)


def _fetch_avg(table, key, value, alias, start_time, end_time):
    """Run the shared template for one item key and return the rows."""
    sql = _SQL_TMPL.format(table=table, key=key, value=value,
                           alias=alias) % (start_time, end_time)
    return get_mysql_data(sql)


def _same_host(a, b):
    """True when two result rows refer to the same (date, group, host)."""
    return a[0] == b[0] and a[1] == b[1] and a[2] == b[2]


def run_select(start_time, end_time):
    """Collect and merge one day's metrics, then write them to a sheet.

    start_time -- the report day ('YYYY-MM-DD'); the query window runs
                  from start_time 20:00 up to end_time 00:00, where
                  end_time is the following day.
    """
    cpu_idle_result = _fetch_avg('trends', 'system.cpu.util[,idle]',
                                 'round(avg(hi.value_avg),2)', 'Cpu_Idle',
                                 start_time, end_time)
    cpu_num_result = _fetch_avg('trends_uint', 'system.cpu.num',
                                'avg(hi.value_avg)', 'Cpu_Number',
                                start_time, end_time)
    mem_avai_result = _fetch_avg('trends_uint', 'vm.memory.size[available]',
                                 'round(avg(hi.value_avg/1024/1024/1024),2)',
                                 'Memory_Avaiable', start_time, end_time)
    mem_total_result = _fetch_avg('trends_uint', 'vm.memory.size[total]',
                                  'round(avg(hi.value_avg/1024/1024/1024),2)',
                                  'Memory_Total', start_time, end_time)
    em2_in_result = _fetch_avg('trends_uint', 'net.if.in[em2]',
                               'round(avg(hi.value_avg/1000),2)',
                               'Network_Eth0_In', start_time, end_time)
    em2_out_result = _fetch_avg('trends_uint', 'net.if.out[em2]',
                                'round(avg(hi.value_avg/1000),2)',
                                'Network_Eth0_In', start_time, end_time)

    # Start from the CPU-count rows and append the other metrics to each
    # row, matching on (date, group, host).  Only the two network
    # columns get a 0 default when no matching row exists (as before).
    msg = [list(row) for row in cpu_num_result]
    for row in msg:
        for idle in cpu_idle_result:
            if _same_host(row, idle):
                row[3] = int(row[3])
                row.append(int(idle[3]))
        for avail in mem_avai_result:
            if _same_host(row, avail):
                row.append(round(float(avail[3]), 2))
        for total in mem_total_result:
            if _same_host(row, total):
                row.append(int(total[3]))
        for net_in in em2_in_result:
            if _same_host(row, net_in):
                row.append(int(net_in[3]))
        if len(row) == 7:
            row.append(0)
        for net_out in em2_out_result:
            if _same_host(row, net_out):
                row.append(int(net_out[3]))
        if len(row) == 8:
            row.append(0)
    cover_excel(msg, start_time)


def main():
    """Build one sheet per day for each of the last 7 days."""
    for days_back in range(7, 0, -1):
        now = datetime.datetime.now()
        start_time = (now - datetime.timedelta(days=days_back)).strftime("%Y-%m-%d")
        end_time = (now - datetime.timedelta(days=days_back - 1)).strftime("%Y-%m-%d")
        run_select(start_time, end_time)


if __name__ == "__main__":
    # Python 2 hack so implicit str<->unicode conversion uses utf-8
    # (the sheet writer relies on it when decoding the header cells).
    default_encoding = 'utf-8'
    if sys.getdefaultencoding() != default_encoding:
        reload(sys)
        sys.setdefaultencoding(default_encoding)

    if not os.path.exists("/tmp/zabbix_log/"):
        os.mkdir("/tmp/zabbix_log/")

    # Module-level connection/cursor/workbook shared by the helpers above.
    conn = MySQLdb.connect(host='10.10.14.19', user='zabbix',
                           passwd='zabbix', port=3306, charset="utf8")
    cur = conn.cursor()
    conn.select_db('zabbix')
    wb = xlwt.Workbook()
    try:
        main()
        wb.save('/tmp/zabbix_log/chance_zabbix_monitor_hour_avg.xls')
    finally:
        # Release DB resources even if a query fails mid-run.
        cur.close()
        conn.close()

    # Mail the workbook out.
    server = {'name': 'smtp.163.com', 'user': 'ops_monitor',
              'passwd': 'xxxx', 'port': 25}
    fro = 'xxx@163.com'
    to = ['xx@xx.com', '244979152@qq.com']
    now_time = (datetime.datetime.now() - datetime.timedelta(days=1)).strftime("%Y/%m/%d")
    last_time = (datetime.datetime.now() - datetime.timedelta(days=7)).strftime("%Y/%m/%d")
    subject = 'xx平臺監控數據【%s-%s】' % (last_time, now_time)
    text = 'xx你好,附件是暢思平臺最近一週天天20至24點平均值監控數據,請查收!\n有問題請聯繫鄧磊.'
    files = ['/tmp/zabbix_log/chance_zabbix_monitor_hour_avg.xls']
    send_mail(server, fro, to, subject, text, files=files)
想修改獲取的監控組的話,把上面sql裏%%廣告%%中的「廣告」改爲你要求的組名就行,其餘的按需修改便可。
腳本我放入github裏(博客裏腳本格式難調整,你們直接去github吧),地址是https://github.com/dl528888/public_script/blob/master/zabbix_hour_avg_monitor.py運維
想定時發送,就把這個腳本放入crontab裏,設置好執行時間便可。
下面是個人crontab
00 14 * * 5 /usr/bin/python /data/software/zabbix_hour_avg_monitor.py
BTW:我公司如今提供IT與運維方面技術外包,主要提供物理機租賃、雲主機租賃與管理、一站式運維外包支持(包含程序部署與維護、主機維護與管理、數據庫管理與維護、CDN管理與維護、監控管理與維護等);
有須要運維外包能夠登陸官網
另外也能夠加羣溝通交流256326024