nginx日志统计分析

| |
[不指定 2017/10/18 23:13 | by 刘新修 ]

 本文主要使用的是grep,awk,cut等工具来对nginx日志进行统计和分析,具体如下:

 

1,列出当天访问最多次数的ip地址

# List the 20 most frequent client IPs for the day.
# NOTE: uniq only collapses *adjacent* duplicate lines, so the input must be
# sorted first -- without the extra sort the same IP is counted several times
# (as the sample output below demonstrates).
cut -d- -f 1 /usr/local/nginx/logs/20160329/access_2016032913.log | sort | uniq -c | sort -rn | head -20

 

Shell代码
  1. [root@httpservera 20160329]# cut -d- -f 1 /usr/local/nginx/logs/20160329/access_2016032913.log |uniq -c | sort -rn | head -20    
  2.      69 180.116.214.31   
  3.      45 180.116.214.31   
  4.      45 180.116.214.31   
  5.      36 49.80.54.111   
  6.      35 183.206.185.204   
  7.      35 180.116.214.31   
  8.      32 49.80.54.111   
  9.      32 49.80.54.111   
  10.      32 180.116.214.31   
  11.      31 117.136.45.101   
  12.      29 180.116.214.31   
  13.      28 218.205.19.112   
  14.      28 180.116.214.31   
  15.      28 180.116.214.31   
  16.      27 49.80.54.111   
  17.      27 222.185.248.242   
  18.      24 49.80.54.111   
  19.      24 175.0.8.161   
  20.      23 49.80.54.111   
  21.      23 49.80.54.111  

 2,查看某一个页面被访问的次数

[root@httpservera 20160329]#grep "/index.php" log_file | wc -l

 

3,查看每一个IP访问了多少页面并排序

# Count how many pages each IP requested, sorted by the count (highest first).
# The awk associative array already emits exactly one line per IP, so no uniq
# is needed; sorting must key on field 2 (-k 2), otherwise -rn orders the list
# by the IP string instead of by the request count.
awk '{++S[$1]} END {for (a in S) print a,S[a]}' access_2016032913.log | sort -rn -k 2 | more

Shell代码
  1. [root@httpservera 20160329]# awk '{++S[$1]} END {for (a in S) print a,S[a]}' access_2016032913.log |uniq|sort -rn|more  
  2. 223.94.229.51 148  
  3. 223.73.166.191 1  
  4. 223.68.252.103 156  
  5. 223.68.167.66 2  
  6. 223.68.106.138 43  
  7. 223.67.99.72 7  
  8. 223.67.153.173 12  
  9. 223.66.93.152 15  
  10. 223.66.38.31 103  
  11. 223.65.191.181 1  
  12. 223.65.191.135 11  
  13. 223.65.190.71 13  
  14. 223.65.141.78 3  
  15. 223.64.63.71 31  
  16. 223.64.63.229 7  
  17. 223.64.62.242 59  
  18. 223.64.62.23 27  
  19. 223.64.62.216 1  
  20. 223.64.62.160 40  
  21. 223.64.61.136 28  
  22. 223.64.60.80 13  
  23. 223.64.60.21 12  
  24. 223.64.237.37 187  
  25. 223.64.209.247 2  
  26. 223.64.158.4 15  

其中,sort -rn 按照数字从大到小排序;注意 uniq 只去除相邻的重复行,因此使用 uniq 之前必须先 sort,否则重复行无法被合并。

 

4,查看某一个ip访问了哪些页面:grep ^xx.xx.xx.xx log_file |awk '{print $1,$7}'

Shell代码
  1. [root@httpservera 20160329]# grep ^223.147.39.194 17 access_2016032913.log |awk '{print $1,$7}'          grep: 17: No such file or directory  
  2. access_2016032913.log:223.147.39.194 //customer/customerInfo/getCustUnReadMsgInfo.json  
  3. access_2016032913.log:223.147.39.194 //customer/customerInfo/getCustUnReadMsgInfo.json  
  4. access_2016032913.log:223.147.39.194 //remind/redDot/checkRedDot.json  
  5. access_2016032913.log:223.147.39.194 //remind/redDot/checkRedDot.json  
  6. access_2016032913.log:223.147.39.194 //thirdpartyapi/appaction/app_action/action_send_batch.json  
  7. access_2016032913.log:223.147.39.194 //customer/customerInfo/getCustUnReadMsgInfo.json  
  8. access_2016032913.log:223.147.39.194 //customer/customerInfo/getCustUnReadMsgInfo.json  
  9. access_2016032913.log:223.147.39.194 //remind/redDot/checkRedDot.json  
  10. access_2016032913.log:223.147.39.194 //remind/redDot/checkRedDot.json  
  11. access_2016032913.log:223.147.39.194 //customer/customerInfo/getCustUnReadMsgInfo.json  
  12. access_2016032913.log:223.147.39.194 //customer/customerInfo/getCustUnReadMsgInfo.json  
  13. access_2016032913.log:223.147.39.194 //remind/redDot/checkRedDot.json  
  14. access_2016032913.log:223.147.39.194 //remind/redDot/checkRedDot.json  
  15. access_2016032913.log:223.147.39.194 //customer/customerInfo/getCustUnReadMsgInfo.json  
  16. access_2016032913.log:223.147.39.194 //customer/customerInfo/getCustUnReadMsgInfo.json  
  17. access_2016032913.log:223.147.39.194 //remind/redDot/checkRedDot.json  
  18. access_2016032913.log:223.147.39.194 //remind/redDot/checkRedDot.json  

5,去掉搜索引擎统计当天的页面:awk '{print $12,$1}' access_2016032913.log | grep ^\"Mozilla | awk '{print $2}' |sort | uniq | wc -l    

Shell代码
  1. [root@httpservera 20160329]# awk '{print $12,$1}' access_2016032913.log | grep ^\"Mozilla | awk '{print $2}' |sort | uniq | wc -l        
  2. 35  

6,查看一个小时内有多少ip访问:

Shell代码
  1. [root@httpservera 20160329]# awk '{print $4,$1}' access_2016032913.log | grep 29/Mar/2016:13 | awk '{print $2}'| sort | uniq | wc -l     
  2. 1926  

2.访问量统计

 

1.根据访问IP统计UV

awk '{print $1}'  access.log|sort | uniq -c |wc -l

 

2.统计访问URL统计PV

awk '{print $7}' access.log|wc -l

 

3.查询访问最频繁的URL

awk '{print $7}' access.log|sort | uniq -c |sort -n -k 1 -r|more

 

4.查询访问最频繁的IP

awk '{print $1}' access.log|sort | uniq -c |sort -n -k 1 -r|more

 

5.根据时间段统计查看日志

 cat  access.log| sed -n '/14\/Mar\/2015:21/,/14\/Mar\/2015:22/p'|more

Shell代码
  1. 1.根据访问IP统计UV  
  2. awk '{print $1}'  access.log|sort | uniq -c |wc -l  
  3. 2.统计访问URL统计PV  
  4. awk '{print $7}' access.log|wc -l  
  5. 3.查询访问最频繁的URL  
  6. awk '{print $7}' access.log|sort | uniq -c |sort -n -k 1 -r|more  
  7. 4.查询访问最频繁的IP  
  8. awk '{print $1}' access.log|sort | uniq -c |sort -n -k 1 -r|more  
  9. 5.根据时间段统计查看日志  
  10.  cat  access.log| sed -n '/14\/Mar\/2015:21/,/14\/Mar\/2015:22/p'|more  

备注:nginx 日志分割脚本

 

Shell代码
  1. [root@iZ237lzm354Z logs]# vim  /opt/shell/nginx_log.sh  
  2. #! /bin/bash  
  3. #Power by guojinbao  
  4. date=`date +%Y-%m-%d-%H-%M-%S`  
  5. logfile="/guojinbao/nginx/logs/access.log"  
  6. logdir=/guojinbao/nginx/logs  
  7. pid=`cat /usr/local/nginx/logs/nginx.pid`  
  8. if [ ! -d $logdir ]; then  
  9.     mkdir -p $logdir  
  10. fi  
  11. /bin/mv $logfile $logdir/access_${date}.log  
  12. kill -HUP $pid  
Linux/Unix | 评论(0) | 引用(0) | 阅读(815)