1. HTTP log analysis
#!/bin/bash
# For every access log passed on the command line, append summary counts to weblog.txt
# and write per-URL hit counts to <logfile>.csv.
for i in "$@"; do
    echo "===================== $i =============================" >> weblog.txt

    echo "IP data" >> weblog.txt
    # total requests, then number of distinct client IPs (field 1)
    awk '{print $1}' "$i" | wc -l >> weblog.txt
    awk '{print $1}' "$i" | sort | uniq -c | wc -l >> weblog.txt

    echo "socket data" >> weblog.txt
    # total and distinct values of the requested-URL field ($8 with this log format)
    awk -F'"' '{print $8}' "$i" | grep -v "^-" | wc -l >> weblog.txt
    awk -F'"' '{print $8}' "$i" | grep -v "^-" | sort | uniq -c | wc -l >> weblog.txt

    echo "socket dedup" >> weblog.txt
    # per-URL hit counts, written as CSV next to the log file
    awk -F'"' '{print $8}' "$i" | grep -v "^-" |
        awk '/^http/ {++state[$NF]} END {for (key in state) print key, ",", state[key]}' >> "$i".csv

    echo -e "\n\n"
done
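For orientation, here is a hypothetical access-log line of the shape this script assumes (the host names and the extra quoted URL field at the end are illustrative, not taken from the author's actual LogFormat). Splitting on double quotes makes that trailing URL field $8, which is what the script counts:

# hypothetical log line -- an extra quoted URL after the user agent becomes $8 with -F'"'
# 10.0.0.1 - - [01/May/2014:20:01:02 +0800] "GET /index.html HTTP/1.1" 200 1234 "-" "Mozilla/5.0" "http://api.example.com/user/login"
awk -F'"' '{print $8}' access_log.20140501 | head -5   # quick sanity check of the URL column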
2. nginx log analysis
#!/bin/bash
# Same analysis for nginx access logs; only the field number of the URL differs ($9 here).
for i in "$@"; do
    echo "===================== $i =============================" >> weblog.txt

    echo "IP data" >> weblog.txt
    # total requests, then number of distinct client IPs (field 1)
    awk '{print $1}' "$i" | wc -l >> weblog.txt
    awk '{print $1}' "$i" | sort | uniq -c | wc -l >> weblog.txt

    echo "socket data" >> weblog.txt
    # total and distinct values of the requested-URL field ($9 with this log_format)
    awk -F'"' '{print $9}' "$i" | grep -v "^-" | wc -l >> weblog.txt
    awk -F'"' '{print $9}' "$i" | grep -v "^-" | sort | uniq -c | wc -l >> weblog.txt

    echo "socket dedup" >> weblog.txt
    # per-URL hit counts, written as CSV next to the log file
    awk -F'"' '{print $9}' "$i" | grep -v "^-" |
        awk '/^http/ {++state[$NF]} END {for (key in state) print key, ",", state[key]}' >> "$i".csv

    echo -e "\n\n"
done
Note: which awk field holds the client IP and which holds the requested interface/URL depends on the order of fields in the log format defined in the web server's configuration file; adjust the $8/$9 in the scripts above to match your own format.
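If it is not obvious which field number the URL lands in after splitting on double quotes, a quick sketch like the following (the log file name is illustrative) prints every quote-delimited field of one line together with its index, so the right column can be plugged into the scripts above:

# print each -F'"' field of the first log line with its awk field number
head -1 access.log | awk -F'"' '{for (n = 1; n <= NF; n++) printf "$%d = %s\n", n, $n}'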
3. IP and PV analysis of a web log for a given time period
# pull the 20:00-20:59 window of 01/May/2014 out of that day's access log
grep "01\/May\/2014:20:.* +0800" access_log.20140501 >> /data/httpd/fenxi.log

# PV: total requests in the window
awk '{print $1}' fenxi.log | wc -l | more

# UV: distinct client IPs in the window
awk '{print $1}' fenxi.log | sort | uniq -c | wc -l
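The same extract also supports a one-pass summary and a per-IP ranking; a minimal sketch, assuming the fenxi.log produced above:

# PV (total requests) and UV (distinct client IPs) in a single awk pass
awk '{pv++; if (!seen[$1]++) uv++} END {print "PV:", pv, "UV:", uv}' fenxi.log

# top 10 client IPs by request count in the same time window
awk '{print $1}' fenxi.log | sort | uniq -c | sort -rn | head -10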