Commonly used shell scripts

Cluster sync and distribution script:

my_xsync.sh

#!/bin/bash

#1. Check the number of arguments
if [ $# -lt 1 ]
then
  echo "Not Enough Arguments!"
  exit 1
fi

#2. Loop over every machine in the cluster
for host in hadoop102 hadoop103 hadoop104
do
  echo "====================  $host  ===================="
  #3. Loop over all files/directories and send them one by one
  for file in "$@"
  do
    #4. Check whether the file exists
    if [ -e "$file" ]
    then
      #5. Get the parent directory
      pdir=$(cd -P "$(dirname "$file")"; pwd)
      #6. Get the file name
      fname=$(basename "$file")
      ssh $host "mkdir -p $pdir"
      rsync -av "$pdir/$fname" "$host:$pdir"
    else
      echo "$file does not exist!"
    fi
  done
done
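
A usage sketch for my_xsync.sh, assuming it has been made executable, that passwordless SSH and rsync are available on hadoop102-104, and that the paths below are only examples:

chmod +x my_xsync.sh

# Distribute a single file; it lands in the same absolute path on every host
./my_xsync.sh /opt/module/hadoop-3.1.3/etc/hadoop/core-site.xml

# Distribute a whole directory; rsync -av copies it recursively
./my_xsync.sh /opt/module/hadoop-3.1.3/etc/hadoop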

 

Hadoop cluster start/stop script

my_hadoop.sh

#!/bin/bash

if [ $# -lt 1 ]
then
    echo "No Args Input..."
    exit 1
fi

case $1 in
"start")
        echo " =================== Starting the Hadoop cluster ==================="

        echo " --------------- Starting HDFS ---------------"
        ssh hadoop102 "/opt/module/hadoop-3.1.3/sbin/start-dfs.sh"
        echo " --------------- Starting YARN ---------------"
        ssh hadoop103 "/opt/module/hadoop-3.1.3/sbin/start-yarn.sh"
        echo " --------------- Starting the historyserver ---------------"
        ssh hadoop102 "/opt/module/hadoop-3.1.3/bin/mapred --daemon start historyserver"
;;
"stop")
        echo " =================== Stopping the Hadoop cluster ==================="

        echo " --------------- Stopping the historyserver ---------------"
        ssh hadoop102 "/opt/module/hadoop-3.1.3/bin/mapred --daemon stop historyserver"
        echo " --------------- Stopping YARN ---------------"
        ssh hadoop103 "/opt/module/hadoop-3.1.3/sbin/stop-yarn.sh"
        echo " --------------- Stopping HDFS ---------------"
        ssh hadoop102 "/opt/module/hadoop-3.1.3/sbin/stop-dfs.sh"
;;
*)
    echo "Input Args Error..."
;;
esac
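
A usage sketch for my_hadoop.sh, assuming the script is executable and that HDFS is started from hadoop102 and YARN from hadoop103, matching the ssh targets above:

chmod +x my_hadoop.sh

./my_hadoop.sh start    # bring up HDFS, YARN and the JobHistoryServer
./my_hadoop.sh stop     # shut them down in the reverse order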

 

 

Script to view the processes on all three servers:

my_jpsall.sh

#!/bin/bash

# Run jps on each node over SSH to list its Java processes
for host in hadoop102 hadoop103 hadoop104
do
        echo "=============== $host ==============="
        ssh $host jps
done
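
A usage sketch for my_jpsall.sh; the output is one jps listing per host, and the exact process names depend on which daemons run on each node:

chmod +x my_jpsall.sh
./my_jpsall.sh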
