shell_【常用】
# Kill every process whose command line matches "common" in one shot.
# pgrep avoids the two classic bugs of `ps -ef | grep NAME | cut -c 9-15`:
# the grep matching its own process, and the fixed-column PID extraction
# breaking on different ps layouts. `xargs -r` skips kill when no match.
# NOTE: prefer plain `kill` (SIGTERM) first; reach for -9 only as a last resort.
pgrep -f "common" | xargs -r kill -9
# Print all non-loopback IPv4 addresses of this host.
# Handles both net-tools output formats: old "inet addr:10.0.0.1" and
# new "inet 10.0.0.1" (the original `grep "inet addr:"` only matched the old one).
ifconfig | awk '/inet / && !/127\.0\.0\.1/ {sub(/^addr:/, "", $2); print $2}'
获取本机IP
脚本执行完后自动执行指定命令(在脚本中注册 EXIT 陷阱):trap "echo 123" EXIT
shell 获取随机数
# Generate a 32-character random hex string from /dev/urandom.
# -N16 reads exactly 16 bytes (no unbounded read + useless cat),
# -An drops the offset column that polluted the original output,
# -tx1 prints bytes as two-digit hex; tr strips spaces/newlines.
od -An -N16 -tx1 /dev/urandom | tr -d ' \n'; echo
00000007b041324b4ae4ed46ec6f5b2ba15c42b
00000208d92144803e264dd2c0dc654489c796b
000004019f2ddfa234ec132baf8bf9dc805beac
00000609ffa88090f46f563dd93a38eb16feba1
磁盘空间
[work(lijiabin01)@tjtxvm-204-4 ~]$ du -h /home/work/lijiabin
4.0K    /home/work/lijiabin/dir01/sub/sub1
8.0K    /home/work/lijiabin/dir01/sub
12K     /home/work/lijiabin/dir01
24K     /home/work/lijiabin/ltest
15M     /home/work/lijiabin
[work(lijiabin01)@tjtxvm-204-4 ~]$ du -hs /home/work/lijiabin
15M     /home/work/lijiabin
[work(lijiabin01)@tjtxvm-204-4 ~]$ df -h /home/work/lijiabin
Filesystem      Size  Used Avail Use% Mounted on
/dev/xvda2       62G   11G   49G  18% /
#!/bin/bash
# For each host in HOSTS, ssh in and have that host scp the file given as $1
# back to the central box (10.48.204.4), suffixing the copy with the host's IP.
# Usage: sh lhck_infotop_bak.sh "/opt/data/infotop/logs/payOrder.log.2016-05-08"
if [ $# -lt 1 ]; then
  echo "Usage: $0 <remote-file-path>" >&2
  exit 1
fi

filename=$(basename -- "$1")
HOSTS=(10.48.166.152 10.48.184.50 10.48.184.56)

for ((i = 0; i < ${#HOSTS[@]}; i++)); do
  # Colored banner: index in cyan, host IP in yellow.
  printf "\033[0;36;40m%3d\033[0;32;40m: =============== \033[0;33;40m%-18s \033[0;32;40m===============\033[0m\n" \
    "$i" "${HOSTS[i]}"
  # The scp runs ON the remote host, pushing its copy of $1 back home.
  desc="scp -r $1 work@10.48.204.4:/home/work/lijiabin/${filename}_${HOSTS[i]}"
  ssh "${HOSTS[i]}" "$desc"
done
exit 0
# Walk day-by-day from BEGIN_TIME to END_TIME (inclusive) and run a count
# query against MySQL for each day.
# NOTE(review): the SQL never references $time_begin, so every iteration runs
# the identical query -- confirm whether the date was meant to appear in a
# WHERE clause.
BEGIN_TIME="2016-05-15 00:00:00"
END_TIME="2016-05-16 00:00:00"
time_begin=$BEGIN_TIME
tmp="(1000,10010)"  # source IDs the query filters on

while [ "$(date -d "$time_begin" +%s)" -le "$(date -d "$END_TIME" +%s)" ]; do
  echo "----->$time_begin"
  # Advance to the next day's midnight.
  time_begin=$(date -d "$time_begin +1 day" "+%Y-%m-%d 00:00:00")
  SQL="select count(*) from topinfo_topinfo where source in $tmp ;"
  # SECURITY: hardcoded DB password is visible in `ps` and in this file;
  # prefer ~/.my.cnf or --defaults-extra-file for credentials.
  /usr/bin/mysql -htopinfo-s.db.58dns.org --port=58885 -utopolr58user dbwww58com_topinfo \
    --default-character-set=utf8 -p'qy#u$#3sfuHn$qJe' --skip-column-names -e"$SQL"
done
# Midnight timestamps bracketing yesterday: $(...) instead of backticks.
starttime=$(date -d '1 days ago' "+%Y-%m-%d 00:00:00")  # yesterday 00:00:00
endtime=$(date -d today "+%Y-%m-%d 00:00:00")           # today 00:00:00
修改系统时间 sudo date -s "2016-07-21 20:24:00"
根据文件 file 的第一列去重 ,指定分隔符 -t ' '
# Dedupe `file` by its first column, space-delimited, as the note above
# describes: -t ' ' sets the field separator (the original command omitted it),
# -k 1,1 restricts the sort key to field 1, -u keeps one line per distinct key.
sort -t ' ' -k 1,1 -u file
# Sort t.log by its 2nd field, then mark repeats of that field:
# the first line of each distinct $2 is printed unchanged; every following
# line with the same $2 is prefixed with "---".
sort -k2 t.log | awk '{ if ($2 == prev) { print "---" $0 } else { prev = $2; print $0 } }'
根据文件 t.log 的第二列排序,然后用 awk 区分重复行和非重复行:第二列首次出现的行原样输出,后续第二列相同的重复行以 "---" 前缀输出

浙公网安备 33010602011771号