
Using Python to get client IP and traffic statistics from an nginx service

2017-08-03 16:15
#!/usr/bin/python
#coding=utf8

log_file = "/usr/local/nginx/logs/access.log"

with open(log_file) as f:
    contexts = f.readlines()

# ip:   key is the client IP, value is the number of requests from that IP
# flow: key is the client IP, value is the total bytes sent to that IP
ip = {}
flow = {}
sum = 0     # total bytes sent across all requests

for line in contexts:
    # bytes sent for this request; index 9 is $body_bytes_sent in the
    # default "combined" log_format -- adjust the index for custom formats
    size = line.split()[9]
    # the client IP is the first field of the line
    ip_attr = line.split()[0]
    # accumulate the overall byte count
    sum = int(size) + sum
    if ip_attr in ip.keys():
        # IP seen before: increment its request count and add to its byte total
        ip[ip_attr] = ip[ip_attr] + 1
        flow[ip_attr] = flow[ip_attr] + int(size)
    else:
        # first occurrence of this IP: initialise its count and byte total
        ip[ip_attr] = 1
        flow[ip_attr] = int(size)

print(ip)
print(flow)
print(sum/1024/1024)    # total traffic in MB (integer division under Python 2)
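
For comparison, the same per-IP request counts and byte totals can be written a bit more compactly with collections.defaultdict. This is only a sketch under the same assumptions as the script above (whitespace-delimited lines, with $body_bytes_sent at index 9 as in nginx's default "combined" log_format):

#!/usr/bin/python
#coding=utf8
from collections import defaultdict

log_file = "/usr/local/nginx/logs/access.log"

ip_count = defaultdict(int)   # requests per client IP
ip_bytes = defaultdict(int)   # bytes sent per client IP
total = 0                     # bytes sent overall

with open(log_file) as f:
    for line in f:
        fields = line.split()
        if len(fields) < 10:              # skip empty or malformed lines
            continue
        ip_count[fields[0]] += 1
        ip_bytes[fields[0]] += int(fields[9])
        total += int(fields[9])

print(dict(ip_count))
print(dict(ip_bytes))
print(total / 1024.0 / 1024.0)            # total traffic in MB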

Now suppose we have the following nginx access log:

[root@weblogic ~]# cat access.log
192.168.223.1 - - [18/Jul/2017:10:21:25 +0800] "GET /favicon.ico HTTP/1.1" 192.168.223.136:8080 404 24 "http://192.168.223.136:8080/proxy_path/index.html" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36" "-"
192.168.223.136 "192.168.223.1" - - [17/Jul/2017:17:06:44 +0800] "GET /index.html HTTP/1.0" "192.168.223.136" 304 0 "-" "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko" "192.168.223.1"
192.168.223.1 - - [18/Jul/2017:10:30:12 +0800] "GET /proxy_path/index.html HTTP/1.1" 192.168.223.136:8080 304 0 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36" "-"
192.168.223.136 "192.168.223.1" - - [18/Jul/2017:10:30:12 +0800] "GET /index.html HTTP/1.0" "192.168.223.137" 304 0 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36" "192.168.223.1"
192.168.223.1 - - [18/Jul/2017:10:38:38 +0800] "GET /proxy_path/index.html HTTP/1.1" 192.168.223.136:8080 304 0 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36" "-"
192.168.223.136 "192.168.223.1" - - [18/Jul/2017:10:38:38 +0800] "GET /index.html HTTP/1.0" "192.168.223.136:80" 304 0 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36" "192.168.223.1"
192.168.223.1 - - [18/Jul/2017:10:45:07 +0800] "GET /proxy_path/index.html HTTP/1.1" 192.168.223.136:8080 304 0 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36" "-"
192.168.223.136 "192.168.223.1" - - [18/Jul/2017:10:45:07 +0800] "GET /index.html HTTP/1.0" "192.168.223.136:80" 304 0 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36" "192.168.223.1"
192.168.223.1 - - [18/Jul/2017:10:51:25 +0800] "GET /proxy_path/index.html HTTP/1.1" 192.168.223.136:8080 304 0 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36" "-"
192.168.223.136 "192.168.223.1" - - [18/Jul/2017:10:51:25 +0800] "GET /index.html HTTP/1.0" "192.168.223.136:80" 304 0 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36" "192.168.223.1"

Counting the nginx client IPs with Python.

The idea: read the file line by line, run strip().split() on each line to get a list of fields; the first element of that list is the client IP.

Initialise an empty dict with the IP as the key and the number of times that IP appears as the value; when an IP repeats, increment its value:
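
As a quick sanity check (only an illustration; exact field positions depend on your log_format), splitting the first sample line in a Python shell gives the client IP at index 0:

>>> line = '192.168.223.1 - - [18/Jul/2017:10:21:25 +0800] "GET /favicon.ico HTTP/1.1" 192.168.223.136:8080 404 24 "..." "..." "-"'
>>> line.strip().split()[0]
'192.168.223.1'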

[root@weblogic ~]# cat nginx.py
#!/usr/bin/python
#coding=utf8

log_file = "/root/access.log"
ip = {}

with open(log_file) as f:
    for i in f.readlines():
        print i.strip().split()[0]
        ip_attr = i.strip().split()[0]
        if ip_attr in ip.keys():
            # IP already in the dict: increment its count
            ip[ip_attr] = ip[ip_attr] + 1
        else:
            # first occurrence of this IP
            ip[ip_attr] = 1

print ip

Run the script and check the output:

[root@weblogic ~]# python nginx.py
192.168.223.1
192.168.223.136
192.168.223.1
192.168.223.136
192.168.223.1
192.168.223.136
192.168.223.1
192.168.223.136
192.168.223.1
192.168.223.136
{'192.168.223.1': 5, '192.168.223.136': 5}
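
The same tally can also be produced with collections.Counter from the standard library, which additionally makes it easy to list the busiest clients first. A minimal sketch, assuming the same /root/access.log file:

#!/usr/bin/python
#coding=utf8
from collections import Counter

log_file = "/root/access.log"

with open(log_file) as f:
    # the first whitespace-separated field of every line is the client IP
    counter = Counter(line.strip().split()[0] for line in f if line.strip())

print(dict(counter))
# busiest clients first
for addr, count in counter.most_common():
    print("%s  %d" % (addr, count))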
