nagios 监控hadoop脚本
2013-11-01 14:24
176 查看
1、[root@localhost libexec]# cat check_hadoop_dfs.py
'''
Created on 20131101
@author: yangyang.feng
'''
#!/usr/bin/env python
import commands
import sys
from optparse import OptionParser
import urllib
import re
def get_dfs_free_percent(url="http://192.168.20.201:50070/dfshealth.jsp"):
    """Return the 'DFS Remaining%' value scraped from the NameNode web UI.

    url -- address of the NameNode dfshealth.jsp page (default keeps the
           original hard-coded host for backward compatibility).
    Returns the remaining-space percentage as a float.
    Raises IndexError if the page does not match the expected layout.
    """
    urlItem = urllib.urlopen(url)
    try:
        html = urlItem.read()
    finally:
        # release the connection even if read() raises
        urlItem.close()
    # raw string so \s is a regex token, not a Python string escape
    pattern = r'.+<td id="col1"> DFS Remaining%<td id="col2"> :<td id="col3">\s+(.+)%<tr class="rowNormal">.+'
    return float(re.findall(pattern, html)[0])
if __name__ == '__main__':
parser = OptionParser(usage="%prog [-w] [-c]", version="%prog 1.0")
parser.add_option("-w", "--warning", type="int", dest="w", default=30, help="total dfs used percent")
parser.add_option("-c", "--critical", type="int", dest="c", default=20, help="total dfs used percent")
(options, args) = parser.parse_args()
if(options.c >= options.w):
print '-w must greater then -c'
sys.exit(1)
dfs_free_percent = get_dfs_free_percent()
if(dfs_free_percent <= options.c ) :
print 'CRITICAL HADOOP DFS. free %d%%' %(dfs_free_percent)
sys.exit(2)
elif(dfs_free_percent <= options.w):
print 'WARNING HADOOP DFS. free %d%%' %(dfs_free_percent)
sys.exit(1)
else:
print 'OK HADOOP DFS. free %d%%' %(dfs_free_percent)
sys.exit(0)
[root@localhost libexec]#
2、[root@localhost libexec]# cat check_hadoop_datanode.py
'''
Created on 20131101
@author: yangyang.feng
'''
#!/usr/bin/env python
import commands
import sys
from optparse import OptionParser
import urllib
import re
def get_value(url="http://192.168.20.201:50070/dfshealth.jsp"):
    """Return the 'Live Nodes' count scraped from the NameNode web UI.

    url -- address of the NameNode dfshealth.jsp page (default keeps the
           original hard-coded host for backward compatibility).
    Returns the live datanode count as a float.
    Raises IndexError if the page does not match the expected layout.
    """
    urlItem = urllib.urlopen(url)
    try:
        html = urlItem.read()
    finally:
        # release the connection even if read() raises
        urlItem.close()
    # raw string so \s and \d are regex tokens, not Python string escapes
    pattern = r'.+Live Nodes</a> <td id="col2"> :<td id="col3">\s+(\d+)<tr class="rowAlt">.+'
    return float(re.findall(pattern, html)[0])
if __name__ == '__main__':
parser = OptionParser(usage="%prog [-w] [-c]", version="%prog 1.0")
parser.add_option("-w", "--warning", type="int", dest="w", default=1)
parser.add_option("-c", "--critical", type="int", dest="c", default=2)
(options, args) = parser.parse_args()
if(options.c >= options.w):
print '-w must greater then -c'
sys.exit(1)
value = get_value()
if(value <= options.c ) :
print 'CRITICAL - Live Nodes %d' %(value)
sys.exit(2)
elif(value <= options.w):
print 'WARNING - Live Nodes %d' %(value)
sys.exit(1)
else:
print 'OK - Live Nodes %d' %(value)
sys.exit(0)
[root@localhost libexec]#
3、[root@localhost libexec]# cat check_hadoop_tasktracker.py
'''
Created on 20131101
@author: yangyang.feng
'''
#!/usr/bin/env python
import commands
import sys
from optparse import OptionParser
import urllib
import re
def get_value(url="http://192.168.20.201:50030/jobtracker.jsp"):
    """Return the active TaskTracker count scraped from the JobTracker web UI.

    url -- address of the JobTracker jobtracker.jsp page (default keeps the
           original hard-coded host for backward compatibility).
    Returns the active tracker count as a float.
    Raises IndexError if the page does not match the expected layout.
    """
    urlItem = urllib.urlopen(url)
    try:
        html = urlItem.read()
    finally:
        # release the connection even if read() raises
        urlItem.close()
    # raw string so \? and \d are regex tokens, not Python string escapes
    pattern = r'.+<a href="machines.jsp\?type=active">(\d+)</a>.+'
    return float(re.findall(pattern, html)[0])
if __name__ == '__main__':
parser = OptionParser(usage="%prog [-w] [-c]", version="%prog 1.0")
parser.add_option("-w", "--warning", type="int", dest="w", default=2)
parser.add_option("-c", "--critical", type="int", dest="c", default=1)
(options, args) = parser.parse_args()
if(options.c >= options.w):
print '-w must greater then -c'
sys.exit(1)
value = get_value()
if(value <= options.c ) :
print 'CRITICAL HADOOP Live Tasktracker: %d' %(value)
sys.exit(2)
elif(value <= options.w):
print 'WARNING HADOOP Live Tasktracker: %d' %(value)
sys.exit(1)
else:
print 'OK HADOOP Live Tasktracker: %d' %(value)
sys.exit(0)
[root@localhost libexec]#
'''
Created on 20131101
@author: yangyang.feng
'''
#!/usr/bin/env python
import commands
import sys
from optparse import OptionParser
import urllib
import re
def get_dfs_free_percent(url="http://192.168.20.201:50070/dfshealth.jsp"):
    """Return the 'DFS Remaining%' value scraped from the NameNode web UI.

    url -- address of the NameNode dfshealth.jsp page (default keeps the
           original hard-coded host for backward compatibility).
    Returns the remaining-space percentage as a float.
    Raises IndexError if the page does not match the expected layout.
    """
    urlItem = urllib.urlopen(url)
    try:
        html = urlItem.read()
    finally:
        # release the connection even if read() raises
        urlItem.close()
    # raw string so \s is a regex token, not a Python string escape
    pattern = r'.+<td id="col1"> DFS Remaining%<td id="col2"> :<td id="col3">\s+(.+)%<tr class="rowNormal">.+'
    return float(re.findall(pattern, html)[0])
if __name__ == '__main__':
parser = OptionParser(usage="%prog [-w] [-c]", version="%prog 1.0")
parser.add_option("-w", "--warning", type="int", dest="w", default=30, help="total dfs used percent")
parser.add_option("-c", "--critical", type="int", dest="c", default=20, help="total dfs used percent")
(options, args) = parser.parse_args()
if(options.c >= options.w):
print '-w must greater then -c'
sys.exit(1)
dfs_free_percent = get_dfs_free_percent()
if(dfs_free_percent <= options.c ) :
print 'CRITICAL HADOOP DFS. free %d%%' %(dfs_free_percent)
sys.exit(2)
elif(dfs_free_percent <= options.w):
print 'WARNING HADOOP DFS. free %d%%' %(dfs_free_percent)
sys.exit(1)
else:
print 'OK HADOOP DFS. free %d%%' %(dfs_free_percent)
sys.exit(0)
[root@localhost libexec]#
2、[root@localhost libexec]# cat check_hadoop_datanode.py
'''
Created on 20131101
@author: yangyang.feng
'''
#!/usr/bin/env python
import commands
import sys
from optparse import OptionParser
import urllib
import re
def get_value(url="http://192.168.20.201:50070/dfshealth.jsp"):
    """Return the 'Live Nodes' count scraped from the NameNode web UI.

    url -- address of the NameNode dfshealth.jsp page (default keeps the
           original hard-coded host for backward compatibility).
    Returns the live datanode count as a float.
    Raises IndexError if the page does not match the expected layout.
    """
    urlItem = urllib.urlopen(url)
    try:
        html = urlItem.read()
    finally:
        # release the connection even if read() raises
        urlItem.close()
    # raw string so \s and \d are regex tokens, not Python string escapes
    pattern = r'.+Live Nodes</a> <td id="col2"> :<td id="col3">\s+(\d+)<tr class="rowAlt">.+'
    return float(re.findall(pattern, html)[0])
if __name__ == '__main__':
parser = OptionParser(usage="%prog [-w] [-c]", version="%prog 1.0")
parser.add_option("-w", "--warning", type="int", dest="w", default=1)
parser.add_option("-c", "--critical", type="int", dest="c", default=2)
(options, args) = parser.parse_args()
if(options.c >= options.w):
print '-w must greater then -c'
sys.exit(1)
value = get_value()
if(value <= options.c ) :
print 'CRITICAL - Live Nodes %d' %(value)
sys.exit(2)
elif(value <= options.w):
print 'WARNING - Live Nodes %d' %(value)
sys.exit(1)
else:
print 'OK - Live Nodes %d' %(value)
sys.exit(0)
[root@localhost libexec]#
3、[root@localhost libexec]# cat check_hadoop_tasktracker.py
'''
Created on 20131101
@author: yangyang.feng
'''
#!/usr/bin/env python
import commands
import sys
from optparse import OptionParser
import urllib
import re
def get_value(url="http://192.168.20.201:50030/jobtracker.jsp"):
    """Return the active TaskTracker count scraped from the JobTracker web UI.

    url -- address of the JobTracker jobtracker.jsp page (default keeps the
           original hard-coded host for backward compatibility).
    Returns the active tracker count as a float.
    Raises IndexError if the page does not match the expected layout.
    """
    urlItem = urllib.urlopen(url)
    try:
        html = urlItem.read()
    finally:
        # release the connection even if read() raises
        urlItem.close()
    # raw string so \? and \d are regex tokens, not Python string escapes
    pattern = r'.+<a href="machines.jsp\?type=active">(\d+)</a>.+'
    return float(re.findall(pattern, html)[0])
if __name__ == '__main__':
parser = OptionParser(usage="%prog [-w] [-c]", version="%prog 1.0")
parser.add_option("-w", "--warning", type="int", dest="w", default=2)
parser.add_option("-c", "--critical", type="int", dest="c", default=1)
(options, args) = parser.parse_args()
if(options.c >= options.w):
print '-w must greater then -c'
sys.exit(1)
value = get_value()
if(value <= options.c ) :
print 'CRITICAL HADOOP Live Tasktracker: %d' %(value)
sys.exit(2)
elif(value <= options.w):
print 'WARNING HADOOP Live Tasktracker: %d' %(value)
sys.exit(1)
else:
print 'OK HADOOP Live Tasktracker: %d' %(value)
sys.exit(0)
[root@localhost libexec]#
相关文章推荐
- 脚本添加nagios监控主机(带分析) 推荐
- Nagios 利用NSClient++的check_nrpe方式使用自定义脚本监控windows
- nagios 流量监控和报警的shell脚本
- 自定义nagios监控脚本
- nagios监控温度脚本
- nagios 使用脚本 监控内存/磁盘使用率
- Hadoop YARN学习之监控集群监控Nagios(4)
- nagios监控haproxy(借助脚本)
- Nagios监控ORACLE ALERT日志脚本
- nagios插件-监控tcp状态连接数shell脚本
- nagios 监控内存脚本
- nagios监控linux主机监控内存脚本
- 一键搭建nagios监控系统之一 脚本篇
- Nagios监控内存脚本
- nagios 监控shell脚本
- nagios 监控页面脚本
- Nagios监控LINUX /var/log/message脚本
- shell脚本监控linux系统内存使用情况的方法(不使用nagios监控linux)
- 懒人nagios页面监控脚本O(∩_∩)O~
- nagios监控redis内存使用情况脚本--shell实现