# ------------------------------------------------------------------------------
# Description
# -----------
#
# This is one for the system administrator, operation and maintenance.
#
# ------------------------------------------------------------------------------
# Authors
# -------
#
# * Dongweiming <ciici123@gmail.com>
#
# ------------------------------------------------------------------------------
|
|
|
|
# Return the access-log path to analyse: $1 when given, otherwise the
# default nginx access log. Used by the log-statistics helpers below.
function retlog() {
    if [[ -z "$1" ]]; then
        echo '/var/log/nginx/access.log'
    else
        # Quoted: the original unquoted `echo $1` word-split the path and
        # collapsed internal whitespace.
        echo "$1"
    fi
}
|
|
|
|
|
|
|
|
# Send only 5 echo requests so ping terminates on its own.
alias ping='ping -c 5'
# Clear the screen and print the current session context (tty, user, cwd).
alias clr='clear; echo Currently logged in on $TTY, as $USER in directory $PWD.'
# Print each directory of the zsh $path array on its own line.
alias path='print -l $path'
# Create parent directories as needed and report each directory created.
alias mkdir='mkdir -pv'

# get top processes eating memory (RSS ascending; largest last)
alias psmem='ps -e -orss=,args= | sort -b -k1,1n'
alias psmem10='ps -e -orss=,args= | sort -b -k1,1n| head -10'

# get top processes eating cpu; if it does not work, try executing: export LC_ALL='C'
alias pscpu='ps -e -o pcpu,cpu,nice,state,cputime,args|sort -k1 -nr'
alias pscpu10='ps -e -o pcpu,cpu,nice,state,cputime,args|sort -k1 -nr | head -10'

# top10 of the history: ${(o)history%% *} takes the first word of every
# history entry (sorted), then the most frequent ten are printed.
alias hist10='print -l ${(o)history%% *} | uniq -c | sort -nr | head -n 10'
|
|
|
|
|
|
|
|
# directory LS: list only the directories in the cwd, one per line.
dls () {
    # (/) is the zsh glob qualifier that restricts matches to directories.
    print -l *(/)
}
|
|
|
|
# Show the ps lines matching $1 (every process when no pattern is
# given), filtering out the grep process itself.
psgrep() {
    local pattern="${1:-.}"
    ps aux | grep "$pattern" | grep -v grep
}
|
|
|
|
# Kills any process that matches a regexp passed to it
killit() {
    local pids
    pids=$(ps aux | grep -v "grep" | grep "$@" | awk '{print $2}')
    # Only invoke kill when something matched; otherwise the original
    # ran `sudo kill` with no arguments and printed a usage error.
    if [[ -n "$pids" ]]; then
        echo "$pids" | xargs sudo kill
    fi
}
|
|
|
|
|
|
|
|
# list contents of directories in a tree-like format
# (fallback only when the real `tree` command is not installed)
if ! (( $+commands[tree] )); then
    tree () {
        # "$@" is quoted so paths containing spaces survive word
        # splitting; with zero arguments it expands to nothing, matching
        # the original behaviour (find then defaults to the cwd on GNU).
        find "$@" -print | sed -e 's;[^/]*/;|____;g;s;____|; |;g'
    }
fi
|
|
|
|
|
|
|
|
# Sort connection state: tally the netstat state column and list the
# states by how often they occur, most frequent first.
sortcons() {
    netstat -nat | awk '{print $6}' | sort | uniq -c | sort -rn
}
|
|
|
|
|
|
|
|
# View all 80 Port Connections
con80() {
    # grep -c counts the matching lines itself — no extra wc process.
    # NOTE(review): ":80" is a substring match and also counts ports
    # such as :8080 — confirm whether that is intended.
    netstat -nat | grep -i -c ":80"
}
|
|
|
|
|
|
|
|
# On the connected IP sorted by the number of connections
sortconip() {
    # Take the remote endpoint (field 5), keep the part before the first
    # colon (the address), and rank the addresses by connection count.
    netstat -ntu | awk '{ split($5, host, ":"); print host[1] }' | sort | uniq -c | sort -n
}
|
|
|
|
|
|
|
|
# top20 of Find the number of requests on 80 port
req20() {
    # Keep tcp lines mentioning 80, strip the port from the remote
    # endpoint, and show the twenty busiest peer addresses.
    netstat -anlp | grep 80 | grep tcp \
        | awk '{ split($5, addr, ":"); print addr[1] }' \
        | sort | uniq -c | sort -nr | head -n 20
}
|
|
|
|
|
|
|
|
# top20 of Using tcpdump port 80 access to view
# Usage: http20 [interface]   — defaults to eth0, as before.
http20() {
    # The interface is now a parameter instead of hard-coded eth0.
    sudo tcpdump -i "${1:-eth0}" -tnn dst port 80 -c 1000 \
        | awk -F"." '{print $1"."$2"."$3"."$4}' \
        | sort | uniq -c | sort -nr | head -20
}
|
|
|
|
|
|
|
|
# top20 of Find time_wait connection
timewait20() {
    # awk both selects the TIME_WAIT lines and extracts the remote
    # endpoint (field 5) in a single pass.
    netstat -n | awk '/TIME_WAIT/ {print $5}' | sort | uniq -c | sort -rn | head -n 20
}
|
|
|
|
|
|
|
|
# top20 of Find SYN connection
syn20() {
    # Select SYN-state lines, strip the port from the remote endpoint,
    # and show the twenty most frequent source addresses.
    netstat -an | awk '/SYN/ { split($5, addr, ":"); print addr[1] }' \
        | sort | uniq -c | sort -nr | head -n 20
}
|
|
|
|
|
|
|
|
# Printing process according to the port number
port_pro() {
    local match="${1:-.}"
    # Field 7 of `netstat -ntlp` is "pid/program"; keep only the pid.
    netstat -ntlp | grep "$match" | awk '{ split($7, proc, "/"); print proc[1] }'
}
|
|
|
|
|
|
|
|
# top10 of gain access to the ip address
accessip10() {
    # NOTE(review): field 11 is assumed to hold the client address in
    # this log format — confirm against the actual nginx log_format.
    # The original printed every count unsorted despite the "top10"
    # name; rank and truncate like the sibling *10/*20 helpers do.
    awk '{counts[$(11)]+=1}; END {for(url in counts) print counts[url], url}' "$(retlog)" \
        | sort -rn | head -n 10
}
|
|
|
|
|
|
|
|
# top20 of Most Visited file or page
visitpage20() {
    local logfile
    logfile="$(retlog)"
    # Rank field 11 of the access log by frequency, highest first.
    awk '{print $11}' "$logfile" | sort | uniq -c | sort -nr | head -20
}
|
|
|
|
|
|
|
|
# top100 of Page lists the most time-consuming (more than 60 seconds) as well as the corresponding page number of occurrences
consume100() {
    # Select requests whose last field (request time) exceeds 60s and
    # whose URL ($7) is a .php page, then rank the URLs by frequency.
    awk '($NF > 60 && $7~/\.php/){print $7}' "$(retlog)" |sort -n|uniq -c|sort -nr|head -100
    # For a Django site (or any framework whose URLs carry no file
    # suffix) use the unfiltered variant instead:
    # awk '{print $7}' "$(retlog)" |sort -n|uniq -c|sort -nr|head -100
}
|
|
|
|
|
|
|
|
# Website traffic statistics (G)
webtraffic() {
    # The awk program must be single-quoted: with the original double
    # quotes the shell expanded $10 (a positional parameter, normally
    # empty) before awk ever saw it, leaving awk a broken program
    # instead of summing the bytes-sent column.
    awk '{sum+=$10} END {print sum/1024/1024/1024}' "$(retlog)"
}
|
|
|
|
|
|
|
|
# Statistical connections 404
c404() {
    # One awk pass both filters on status 404 and prints "status url";
    # the original spawned a second awk process to do the same work.
    awk '($9 ~ /404/) {print $9,$7}' "$(retlog)" | sort
}
|
|
|
|
|
|
|
|
# Statistical http status.
httpstatus() {
    local logfile
    logfile="$(retlog)"
    # Tally requests per HTTP status code (field 9 of the access log).
    awk '{ tally[$9]++ } END { for (code in tally) print code, tally[code] }' "$logfile"
}
|
|
|
|
|
|
|
|
# Delete 0 byte file
# Usage: d0 [dir]   — defaults to the current directory.
d0() {
    # -f suffices (matches are plain files, -r was pointless); `+`
    # batches paths into few rm invocations instead of one per file,
    # and `--` protects against names that begin with a dash.
    find "${1:-.}" -type f -size 0 -exec rm -f -- {} +
}
|
|
|
|
|
|
|
|
# gather external ip address
geteip() {
    # Ask icanhazip for our public address; -sS keeps curl quiet while
    # still reporting errors.
    curl -sS https://icanhazip.com
}
|
|
|
|
|
|
|
|
# determine local IP address
getip() {
    # Prefer iproute2's `ip` when available ($+commands is zsh's
    # command-hash lookup), otherwise fall back to legacy ifconfig.
    if (( ${+commands[ip]} )); then
        # `ip addr` prints "inet 192.168.1.2/24 ..." — field 2 is the
        # address (with prefix length), loopback excluded.
        ip addr | grep "inet " | grep -v '127.0.0.1' | awk '{print $2}'
    else
        # ifconfig prints "inet addr:192.168.1.2 ..." — take the value
        # after the colon, loopback excluded.
        ifconfig | grep 'inet addr:'| grep -v '127.0.0.1' | cut -d: -f2 | awk '{ print $1}'
    fi
}
|
|
|
|
|
|
|
|
# Clear zombie processes
clrz() {
    local zpids
    # ps -eal: field 2 is the state, field 4 the PID.
    zpids=$(ps -eal | awk '{ if ($2 == "Z") {print $4}}')
    # The original piped the PIDs straight into `kill`, which ignores
    # stdin — the function never signalled anything. xargs turns them
    # into arguments; the guard avoids running kill with an empty list.
    # NOTE(review): zombies only disappear once their parent reaps them;
    # signalling the zombie itself is usually a no-op.
    if [[ -n "$zpids" ]]; then
        echo "$zpids" | xargs kill -9
    fi
}
|
|
|
|
|
|
|
|
# Second concurrent
conssec() {
    # Count requests per timestamp (field 4) for status 200/30x/404 and
    # show the ten busiest seconds.
    awk '$9 ~ /200|30|404/ { hits[$4]++ } END { for (t in hits) print t, hits[t] }' "$(retlog)" \
        | sort -k 2 -nr | head -n 10
}
|