#!/usr/bin/python
# -*- coding: utf-8 -*-
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++#
#Name : collMonitorDataToDB.py #
#Created : 2017/07/06 #
#Author : @ruiy #
#Version : 2.0 #
#Copyright : 2016 ~ 2017 ahwater.net Corporation. #
#Description : collection monitor indicator data to DB. #
#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++#

import pyodbc
import sys
import os
import commands
import datetime
import paramiko
import re
import ConfigParser
import codecs
import chardet
#import psutil
#import sys
#reload(sys)
#sys.setdefaultencoding('utf8')

# Main columns of the monitoring table:
# id,timestramp,location,ip,hostname,port,port_est_counts,cpu_use_ratio
# mem_total,mem_free,mem_use_ratio
# disk_drive_c_total,disk_drive_c_free,disk_drive_c_use_ratio
# send_flow,recv_flow,send_packets,recv_packets
# Memory
# Total installed memory (bytes): wmic memorychip get capacity
# Free physical memory (KB): wmic OS get FreePhysicalMemory
# Disk
# List physical disks: wmic DISKDRIVE get deviceid,Caption,size,InterfaceType
# List logical partitions: wmic LOGICALDISK get name,Description,filesystem,size,freespace
# Query a single partition: fsutil volume diskfree c:
# Partition total/free can also be read with: wmic LOGICALDISK get name,Description,filesystem,size,freespace
# CPU
# CPU info / core count: wmic cpu get name,addresswidth,processorid
# Real-time CPU usage: wmic cpu get LoadPercentage
# Processes
# Process list: wmic process get Caption,KernelModeTime,UserModeTime
# Whitespace scrubbing pattern used throughout:
# mystring.strip().replace(' ', '').replace('\n', '').replace('\t', '').replace('\r', '').strip()
# Network traffic statistics: netstat -e

# Read the database connection settings from the config file
dbconf=ConfigParser.SafeConfigParser()
#with codecs.open('../conf/config.properties','r',encoding='utf-8') as f:
# dbconf.readfp(f)
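
# For reference, ../conf/config.properties is expected to provide at least the
# sections/keys read via dbconf.get() in this script. The layout below is an
# illustrative sketch only; the values shown are placeholders, not the real ones.
#
#   [db]
#   driver={sql server native client 10.0}
#   server=10.34.1.30
#   database=LogFeedback
#   uid=sa
#   pwd=******
#
#   [const_18]
#   location_18=<monitoring location label>
#   describe_18=<host usage description>
#   countport_18=80
#
#   [ssh_18]
#   ssh_ip_18=<monitored host ip>
#   ssh_port_18=22
#   ssh_username_18=<ssh user>
#   ssh_password_18=<ssh password>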
dbconf.read('../conf/config.properties')

db_driver=dbconf.get('db','driver')
db_server=dbconf.get('db','server')
db_database=dbconf.get('db','database')
db_uid=dbconf.get('db','uid')
db_pwd=dbconf.get('db','pwd')

#print type(db_server)
#print type(db_database)
#print type(db_uid)
#print type(db_pwd)

# Connect from Python to SQL Server 2008 R2
# Connection attributes are read from the config file
"""
conn = pyodbc.connect(
driver='{sql server native client 10.0}';
server=%s;
database=%s;
uid=%s;
pwd=%s;
)
""" #debug
#conn = pyodbc.connect('driver={sql server native client 10.0};server=%s;database=%s;uid=%s;pwd=%s;'%(db_server,db_database,db_uid,db_pwd))
#conn_info = ('Driver{MySQL51};Server=%s;Port=%s;Database=%s;User=%s; Password=%s;Option=3;'%(host, port, database, user,password))
#conn1_info=('Driver={sql server native client 10.0};server=%s;database=%s;uid=%s;pwd=%s;'%('10.34.1.30','LogFeedback','sa','ahswyc'))
#print conn1_info
conn_info=('driver=%s;server=%s;database=%s;uid=%s;pwd=%s;'%(db_driver,db_server,db_database,db_uid,db_pwd))
#print conn_info
conn=pyodbc.connect(conn_info)

# Hard-coded connection parameters, kept for reference
#conn = pyodbc.connect('driver={sql server native client 10.0};server=10.34.1.30;database=LogFeedback;uid=sa;pwd=ahswyc;')
"""
conn = pyodbc.connect(
driver='{sql server native client 10.0}',
server='10.34.1.30',
database='LogFeedback',
uid='sa',
pwd='ahswyc'
)
""" cursor = conn.cursor() #参考调试,入库测试语句
#cursor.execute("insert into iisEstablishConnCounts(timestramp,iisHostIp,connCounts) values('2017/07/06:22:10','10.34.1.23',90)") #sql入库字段基于变量
#80端口连接数统计
#netstat -na -p tcp| findstr 80 | find /C "ESTABLISH" #cmd="netstat -na -p tcp| findstr 80 | find /C \"ESTABLISH\""
#(status,output) = commands.getstatusoutput("%s" % cmd) #python执行调用系统命令并将结果保存到变量
#注意在linux下用python执行系统命令并将结果保存到变量与windows不同如下语句
#cmd="netstat -na -p tcp| findstr 80 | find /C \"ESTABLISH\""
#(status,output) = commands.getstatusoutput("%s" % cmd) #获取监控时间戳
dt= datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
#print(dt)
dateTime =dt
#print("debug1: ",dateTime)
print("current dataTime: ",dateTime)
# Local collection (below) is deprecated; this script mainly collects metrics from remote hosts
# Enable temporarily for debugging:
#portCounts=os.popen("netstat -na -p tcp| findstr 80 | find /C \"ESTABLISH\"").read()
#print("debug2: ",portCounts)

# Manually configured data
location=dbconf.get('const_18','location_18')
describe=dbconf.get('const_18','describe_18')
countport=dbconf.get('const_18','countport_18')

#print(chardet.detect(location.encode('utf-8')))
#print type(location)
#op=('%s' % location).encode('gbk')
#print op
#localT=ur'省水文局'
#localT='水文局3tets1123'.decode('utf-8')
#localT=location.decode('utf-8').encode('gbk')
print location
localT=location
#localT="anhui shuiwen ju"
print("monitr location: ",localT)
# The SSH server runs on the monitored host, so the SSH IP and the monitored IP are the same
ssh_ip=dbconf.get('ssh_18','ssh_ip_18')
mip=ssh_ip
print("monitor ip:",mip) mport=countport
print("port: ",mport) #paramiko日志
#logfiles=datetime.datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
#os.environ['logfiles'] = str(logfiles)
#paramiko.util.log_to_file(os.system(echo '../logs/$logfiles.txt'))
#paramiko.util.log_to_file("../logs/{logfiles}".txt) #远程监控数据获取
#paramiko ssh跨机建立
#transport = paramiko.Transport(('192.168.11.181',22))
# last octet of the monitored IP, used to tag the paramiko log file
tail_ip=ssh_ip.split('.')[3]
logfiles=datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%S')
paramiko.util.log_to_file('../logs/%s-%s.txt'% (logfiles,tail_ip))

"""
transport = paramiko.Transport(('10.34.1.23', 22))
transport.connect(username='ahwater', password='Aa7788..')
ssh = paramiko.SSHClient()
ssh._transport = transport
"""
# SSH connection attributes read from the config file
#ssh_ip=dbconf.get('ssh_18','ssh_ip_18')
ssh_port=int(dbconf.get('ssh_18','ssh_port_18'))
ssh_username=dbconf.get('ssh_18','ssh_username_18')
ssh_password=dbconf.get('ssh_18','ssh_password_18')
#print ssh_password
transport = paramiko.Transport((ssh_ip, ssh_port))
transport.connect(username=ssh_username, password=ssh_password)
ssh = paramiko.SSHClient()
ssh._transport = transport
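
# Note: assigning ssh._transport reuses the Transport opened above. An alternative
# (sketch only, not what this script does) is to let SSHClient manage the
# connection itself:
#
#   ssh = paramiko.SSHClient()
#   ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
#   ssh.connect(ssh_ip, port=ssh_port, username=ssh_username, password=ssh_password)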

# Hostname of the monitored host
cmd01='hostname'
stdin, stdout, stderr = ssh.exec_command(cmd01)
#print(stdout.read())
data01=stdout.read().strip().replace(' ', '').replace('\t', '').replace('\r', '').strip()
print("monitor hostname: ",data01) #端口连接数统计
#在人工配置数据处填写的端口的端口establish port counts
cmd02='netstat -na'
stdin, stdout, stderr = ssh.exec_command(cmd02)
netstat_out=stdout.read()
# number of times the configured port string appears in the netstat output
data02=netstat_out.count(mport)
print("port est counts: ",data02)

# CPU usage ratio
cmd03='wmic cpu get LoadPercentage'
stdin, stdout, stderr = ssh.exec_command(cmd03)
#da03=stdout.read().strip('LoadPercentage').replace('\n', '').replace('\t', '').replace('\r', '').replace(' ','').strip()
#da03=stdout.read().strip('LoadPercentage').replace(' ','').replace('\n','').replace('\t', '').replace('\r', '')
# one LoadPercentage value per physical CPU
da03=stdout.read().replace('LoadPercentage','').split()
#print(da03)
statis=0
counts=0
data03=0.0   # default in case parsing fails below
for i in da03:
    # count CPUs and accumulate their load percentages
    counts = counts + 1
    statis = statis + int(i)
try:
    #print(counts)
    # average load across CPUs, expressed as a 0-1 ratio
    data03=round(float(statis)/counts/100,6)
    #data003="'" +data03 +"'"
    print("cpu use ratio: ",data03)
except:
    pass

# Total installed physical memory (GB)
"""
cmd04='wmic memorychip get capacity'
stdin,stdout,stderr = ssh.exec_command(cmd04)
da04=stdout.read().strip('Capacity').replace('\n','').replace('\t','').replace('\r','').replace(' ','').strip()
data04=float(da04)/1024/1024/1024
print("mem total Gb: ",data04)
"""
cmd04='wmic memorychip get capacity'
stdin,stdout,stderr = ssh.exec_command(cmd04)
d4_1=stdout.read().replace('Capacity','').replace(' ','').replace('\t','').replace('\r','').strip()
# one Capacity value (bytes) per installed memory module
d4_3=filter(lambda x: x, d4_1.split('\n'))
counts_4=0
for i in d4_3:
    counts_4 = counts_4 + int(i)
# total bytes -> GB
data04=float(counts_4)/1024/1024/1024
print("mem total Gb: ",data04)

# Free physical memory (GB)
cmd05='wmic OS get FreePhysicalMemory'
stdin,stdout,stderr = ssh.exec_command(cmd05)
da05=stdout.read().strip('FreePhysicalMemory').replace('\n','').replace('\t','').replace('\r','').replace(' ','').strip()
data05=round(float(da05)/1024/1024,4)
print("mem free Gb: ",data05) #内存使用率
data06=round(float((data04 - data05)) / data04,4)
print("mem use ratio: ",data06) #磁盘信息,根系统盘C:

# Disk info for the system drive C:
#cmd07='fsutil volume diskfree c:'
cmd07='wmic LOGICALDISK get FreeSpace,Size'
stdin,stdout,stderr = ssh.exec_command(cmd07)
# Strip the FreeSpace and Size header tokens
d7_1=stdout.read().strip().replace('FreeSpace','').replace('Size','')
# Remove \r and \n
d7_2=d7_1.strip().replace('\r','').replace('\n','')
# Collapse runs of spaces into single spaces
d7_3=(' '.join(filter(lambda x: x, d7_2.split(' '))))
# Split into a list of values
disk_data=d7_3.split(' ')

# C: partition total in GB (wmic reports bytes)
data07=round(float(disk_data[1])/1024/1024/1024,4)
print("C disk total Gb:",data07)
# C: partition free space in GB
data08=round(float(disk_data[0])/1024/1024/1024,4)
print("C disk free Gb:",data08)
# C: partition usage ratio
data09=round((data07 - data08) / data07,4)
print("C disk space use ratio: ",data09)

# Network traffic statistics
cmd08='netstat -e'
stdin, stdout, stderr = ssh.exec_command(cmd08)
d8_1=stdout.read().strip().replace('\r','').replace('\n','')
d8_2=(' '.join(filter(lambda x: x, d8_1.split(' '))))
d8_3=d8_2.split(' ')

# Traffic counters are in bytes by default; strip out the non-numeric label characters
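# The token indices used below (3..6) assume this host's 'netstat -e' layout:
# the Bytes row followed by the Unicast packets row, each with a Received and a
# Sent column, with the (Chinese) labels stripped out. Other Windows locales or
# versions may place these values at different positions.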
net_data=re.sub(r'\D','',d8_3[4])
# Cumulative bytes sent, converted to GB
data10=round(float(net_data)/1024/1024/1024,4)
print("send traffic flow Gb: ",data10)
# Cumulative bytes received, converted to GB
#net_data2=re.sub(r'\D','',d8_3[3])
net_data2=d8_3[3]
data11=round(float(net_data2)/1024/1024/1024,4)
print("recv traffic flow Gb: ",data11)

# Cumulative packets sent (TCP/IP layer)
data12=int(re.sub(r'\D','',d8_3[6]))
#data12=round(float(net_data3)/1024/1024/1024,4)
print("send packets: ",data12)

# Cumulative packets received
data13=int(d8_3[5])
print("recv packets: ",data13)

# Insert the collected metrics into the database
# SQL statement template whose column values are filled in from variables
sql_debug = """insert into iisEstablishConnCounts(timestramp,iisHostIp,connCounts) values(
%(timestramp)s,
'10.34.1.23',
%(connCounts)s
)
"""
# id,timestramp,location,ip,hostname,port,port_est_counts,cpu_use_ratio
# mem_total,mem_free,mem_use_ratio
# disk_drive_c_total,disk_drive_c_free,disk_drive_c_use_ratio
# send_flow,recv_flow,send_packets,recv_packets

sql = """insert into ahwater_perf_monitor(timestramp,location,ip,hostname,host_use_description,port,port_est_counts,cpu_use_ratio,
mem_total,mem_free,mem_use_ratio,
disk_drive_c_total,disk_drive_c_free,disk_drive_c_use_ratio,
send_flow,recv_flow,send_packets,recv_packets) values(
%(timestramp)s,
%(location)s,
%(ip)s,
%(hostname)s,
%(host_use_description)s,
%(port)s,
%(port_est_counts)s,
%(cpu_use_ratio)s,
%(mem_total)s,
%(mem_free)s,
%(mem_use_ratio)s,
%(disk_drive_c_total)s,
%(disk_drive_c_free)s,
%(disk_drive_c_use_ratio)s,
%(send_flow)s,
%(recv_flow)s,
%(send_packets)s,
%(recv_packets)s
)
""" #print(sql)
#cursor.execute(sql_debug % dict(timestramp = dateTime,connCounts = portCounts))
#print("\n")

"""
print dateTime
print localT
print mip
print data01
print mport
print data02
print data03
print data04
print data05
print data06
print data07
print data08
print data09
print data10
print data11
print data12
print data13
"""
#print describe

cursor.execute(sql % dict(
timestramp="'" + dateTime + "'",
location="'" + localT + "'",
ip="'" + mip + "'",
hostname="'" + data01 + "'",
host_use_description="'" + describe + "'",
port=mport,
port_est_counts=data02,
cpu_use_ratio=data03,
mem_total=data04,
mem_free=data05,
mem_use_ratio=data06,
disk_drive_c_total=data07,
disk_drive_c_free=data08,
disk_drive_c_use_ratio=data09,
send_flow=data10,
recv_flow=data11,
send_packets=data12,
recv_packets=data13
)) """
cursor.execute(sql % dict(
timestramp=dateTime,
location=localT,
ip=mip,
hostname=data01,
port=mport,
port_est_counts="'" +str(data02) + "'",
cpu_use_ratio="," +str(data03) + "'",
mem_total="'"+str(data04)+"'",
mem_free="'"+str(data05)+"'",
mem_use_ratio="'"+str(data06)+"'",
disk_drive_c_total="'"+str(data07)+"'",
disk_drive_c_free="'" +str(data08)+"'",
disk_drive_c_use_ratio="'"+str(data09)+"'",
send_flow="'"+str(data10)+"'",
recv_flow="'"+str(data11)+"'",
send_packets=data12,
recv_packets=data13
))
""" #关闭pyodbc conn连接
conn.commit()
conn.close()
# Close the paramiko SSH session
transport.close()
