Nginx 日志切割
2016-04-05 13:00
549 查看
# Log rotation for multiple virtual hosts (one subdirectory per vhost)
#!/bin/bash
# For every vhost directory under /opt/logs, move yesterday's access.log into
# <vhost><YYYY>/<MM>/access_<YYYYMMDD>.log, then send USR1 so the nginx
# master process reopens its log files and starts writing fresh ones.
set -u

logs_root="/opt/logs"
pid_file="/opt/openresty/nginx/logs/nginx.pid"

# Compute yesterday's date parts once instead of re-running `date` per use.
year=$(date -d "yesterday" +"%Y")
month=$(date -d "yesterday" +"%m")
stamp=$(date -d "yesterday" +"%Y%m%d")

# Glob instead of parsing `ls`; the trailing / restricts matches to directories.
for vhost_dir in "${logs_root}"/*/; do
  [[ -d "${vhost_dir}" ]] || continue
  vhost=$(basename "${vhost_dir}")
  # NOTE(review): the original concatenated the vhost name and the year with
  # no separator (e.g. site12016/04/); that layout is preserved — confirm intent.
  archive_dir="${vhost_dir}${vhost}${year}/${month}"
  mkdir -p -- "${archive_dir}"
  # Skip vhosts that have not produced an access.log yet so `mv` cannot fail.
  if [[ -f "${vhost_dir}access.log" ]]; then
    mv -- "${vhost_dir}access.log" "${archive_dir}/access_${stamp}.log"
  fi
done

# USR1 asks nginx to reopen log files (standard nginx log-rotation signal).
kill -USR1 "$(cat "${pid_file}")"
# Log rotation for a single virtual host
#!/bin/bash
# Move yesterday's access.log into <YYYY>/<MM>/access_<YYYYMMDD>.log under the
# nginx log directory, then signal nginx (USR1) to reopen its log files.
set -u

logs_path="/usr/local/webserver/nginx/logs/"
pid_file="/usr/local/webserver/nginx/nginx.pid"

# Compute yesterday's date parts once.
year=$(date -d "yesterday" +"%Y")
month=$(date -d "yesterday" +"%m")
stamp=$(date -d "yesterday" +"%Y%m%d")

archive_dir="${logs_path}${year}/${month}"
mkdir -p -- "${archive_dir}"

# Only rotate when a log actually exists, so a missing file does not error out.
if [[ -f "${logs_path}access.log" ]]; then
  mv -- "${logs_path}access.log" "${archive_dir}/access_${stamp}.log"
fi

kill -USR1 "$(cat "${pid_file}")"
# crontab -e 增加一个定时任务
0 0 * * * /bin/bash /usr/local/nginx/sbin/cut-log.sh
1,根据访问ip统计UV
# awk '{print $1}' http.log |sort | uniq -c |wc -l
2. 根据访问的URL统计PV
# awk '{print $7}' http.log |wc -l
3,统计访问最频繁的url
# awk '{print $7}' http.log |sort | uniq -c |sort -n -k 1 -r | more
4,查询访问最频繁的ip
# awk '{print $1}' http.log |sort | uniq -c |sort -n -k 1 -r| more
5,根据时间段统计查看日志
# sed -n '/13\/May\/2016:12/,/14\/May\/2016:12/p' http.log | more
#!/bin/bash
# Rotate yesterday's access.log for every virtual host found under /opt/logs,
# archiving each into <vhost><YYYY>/<MM>/access_<YYYYMMDD>.log, then send
# USR1 so nginx reopens its log files.
set -u

readonly LOGS_ROOT="/opt/logs"
readonly NGINX_PID="/opt/openresty/nginx/logs/nginx.pid"

# Hoist the date calls out of the loop; all paths use yesterday's date.
y=$(date -d "yesterday" +"%Y")
m=$(date -d "yesterday" +"%m")
d=$(date -d "yesterday" +"%Y%m%d")

# Iterate directories via glob rather than `cd` + `ls` parsing.
for dir in "${LOGS_ROOT}"/*/; do
  [[ -d "${dir}" ]] || continue
  name=$(basename "${dir}")
  # NOTE(review): vhost name and year are joined with no separator, matching
  # the original layout (e.g. site12016/04/) — verify this is intentional.
  dest="${dir}${name}${y}/${m}"
  mkdir -p -- "${dest}"
  # A vhost without an access.log is simply skipped.
  [[ -f "${dir}access.log" ]] && mv -- "${dir}access.log" "${dest}/access_${d}.log"
done

# Signal the nginx master to reopen logs (log-rotation handshake).
kill -USR1 "$(cat "${NGINX_PID}")"
# Log rotation for a single virtual host
#!/bin/bash
# Archive yesterday's access.log under <YYYY>/<MM>/ in the nginx log
# directory, then send USR1 so nginx reopens its log files.
set -u

logs_path="/usr/local/webserver/nginx/logs/"
# NOTE(review): pid file here is nginx/nginx.pid (not nginx/logs/nginx.pid) —
# path kept as in the original; confirm against the actual nginx.conf.
pid_file="/usr/local/webserver/nginx/nginx.pid"

y=$(date -d "yesterday" +"%Y")
m=$(date -d "yesterday" +"%m")
d=$(date -d "yesterday" +"%Y%m%d")

dest="${logs_path}${y}/${m}"
mkdir -p -- "${dest}"

# Guard against a missing log so the script does not abort on `mv`.
[[ -f "${logs_path}access.log" ]] && mv -- "${logs_path}access.log" "${dest}/access_${d}.log"

kill -USR1 "$(cat "${pid_file}")"
# crontab -e 增加一个定时任务
0 0 * * * /bin/bash /usr/local/nginx/sbin/cut-log.sh
1,根据访问ip统计UV
# awk '{print $1}' http.log |sort | uniq -c |wc -l
2. 根据访问的URL统计PV
# awk '{print $7}' http.log |wc -l
3,统计访问最频繁的url
# awk '{print $7}' http.log |sort | uniq -c |sort -n -k 1 -r | more
4,查询访问最频繁的ip
# awk '{print $1}' http.log |sort | uniq -c |sort -n -k 1 -r| more
5,根据时间段统计查看日志
# sed -n '/13\/May\/2016:12/,/14\/May\/2016:12/p' http.log | more
相关文章推荐
- nginx代理指定目录
- 访问Nginx发生SSL connection error的一种情况
- Nginx+Naxsi部署专业级Web应用防火墙
- CentOS 6.2实战部署Nginx+MySQL+PHP
- nginx中http核心模块的配置指令2
- nginx中http核心模块的配置指令3
- nginx中http核心模块的配置指令4
- nginx中http的fastcgi模块的配置指令1
- Awstats处理多apache日志
- Nginx 学习笔记(一)
- 网站502与504错误分析
- 用zabbix监控nginx_status状态
- 艰难完成 nginx + puma 部署 rails 4的详细记录
- 典型入侵日志分析
- 把Lua编译进nginx步骤方法
- MS SQL Server数据库清理错误日志的方法
- 对MySQL日志操作的一些基本命令总结
- 简介操作MySQL日志的一些方法
- MySQL的日志基础知识及基本操作学习教程