1.根据访问IP统计UV
# UV: count distinct client IPs (field 1); sort -u collapses duplicates in one pass
awk '{print $1}' access.log | sort -u | wc -l
2.根据访问URL统计PV
# PV: one request per log line, so the total line count is the PV
awk 'END{print NR}' access.log
3.查询访问最频繁的URL
# Top URLs: tally identical request paths (field 7), busiest first, page through the result
awk '{print $7}' access.log | sort | uniq -c | sort -rn | more
4.查询访问最频繁的IP
# Top IPs: tally requests per client IP (field 1), busiest first, page through the result
awk '{print $1}' access.log | sort | uniq -c | sort -rn | more
5.根据时间段统计查看日志
# Requests logged between 21:00 and 22:00 on 14/Mar/2015.
# sed reads the file directly — the original `cat file | sed` was a useless use of cat.
sed -n '/14\/Mar\/2015:21/,/14\/Mar\/2015:22/p' access.log | more
6.awk分割信息后获取最后一列
# Grep "abc" across every file in the directory.
# The original used `ll` — an interactive alias that is undefined in scripts — and
# parsed `ls -l` output, whose last field lacks the directory prefix, so the
# subsequent `cat` would look for the files in the wrong place.
# grep the files directly; -h suppresses filename prefixes, mimicking `cat | grep`.
grep -h "abc" /home/sdzw/tcf/20110914_001/*
7.统计访问最频繁的前100个IP
# Top 100 client IPs by request count (substitute 日志地址 with the log file path)
awk '{print $1}' 日志地址 | sort | uniq -c | sort -rn | head -n 100
8.按记录序号格式化输出md5与url到111.ini
# Export each record as an INI-style section: [NR-1], md5=<field1>, url=<field2>.
# The original line was truncated mid-quote: the awk program was never closed, the
# ">111.ini" redirection was trapped inside the string, and the url field ($2) and
# closing "}'" were missing. Reconstructed with an explicit input file so it does
# not silently block reading stdin.
awk -F' ' '{print "\n["NR-1"]\nmd5="$1"\nurl="$2}' access.log > 111.ini
9.排除搜索引擎爬虫后统计访问最多的前20个IP
# Top 20 client IPs after filtering out search-engine crawlers (case-insensitive
# exclusion of bing/baidu/google/sougou). grep reads the file directly — the
# original `cat file | grep` was a useless use of cat.
grep -i -v -E "bing|baidu|google|sougou" xxx.xxx.com.access.log-20160616 | awk '{print $1}' | sort | uniq -c | sort -rn | head -n 20