Trying out a Spark Standalone cluster

Spark Standalone cluster

node* (every node)
-- stop firewalld
systemctl stop firewalld
systemctl disable firewalld
-- unpack spark
cd /opt
tar -zxvf spark-2.4.0-bin-hadoop2.7.tgz
cd spark-2.4.0-bin-hadoop2.7
-- upload application & application data
ftp spark.test-1.0.jar -> /opt/spark-2.4.0-bin-hadoop2.7
ftp words_count.txt -> /opt/spark-2.4.0-bin-hadoop2.7/data
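Stopping firewalld keeps the master port (7077), the web UIs (8080/8081) and the ports Spark picks at runtime reachable between the nodes; on a shared cluster you would open those ports instead of disabling the firewall. The jar and words_count.txt go to every node, presumably so the input file can be read as a plain local file without HDFS.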

node1
cd /opt/spark-2.4.0-bin-hadoop2.7
./sbin/start-master.sh
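start-master.sh logs the master URL (spark://node1:7077) under logs/; that URL is what the workers and spark-submit use below. The master web UI is served on port 8080 by default.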

node2
cd /opt/spark-2.4.0-bin-hadoop2.7
./sbin/start-slave.sh spark://node1:7077

node3
cd /opt/spark-2.4.0-bin-hadoop2.7
./sbin/start-slave.sh spark://node1:7077

node4
cd /opt/spark-2.4.0-bin-hadoop2.7
./sbin/start-slave.sh spark://node1:7077
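Once started, node2-node4 register with the master and should appear as ALIVE workers on the master web UI at http://node1:8080 (each worker also serves its own UI on port 8081).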

node? (any one node)
cd /opt/spark-2.4.0-bin-hadoop2.7
./bin/spark-submit --class xyz.fz.spark.WordsCount --master spark://node1:7077 spark.test-1.0.jar

Spark result: Lines with Basics: 2, lines with Programming: 2
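The source of spark.test-1.0.jar is not part of this post; judging from the class name xyz.fz.spark.WordsCount and the output above, the driver presumably looks roughly like the minimal Scala sketch below (the object layout, the counted keywords and the input path are assumptions based on the commands above, not the actual code):

package xyz.fz.spark

import org.apache.spark.sql.SparkSession

object WordsCount {
  def main(args: Array[String]): Unit = {
    // --master is supplied by spark-submit (spark://node1:7077)
    val spark = SparkSession.builder().appName("WordsCount").getOrCreate()
    // the file was uploaded to /opt/spark-2.4.0-bin-hadoop2.7/data on every node,
    // so each executor can read it as a local file
    val lines = spark.read.textFile("file:///opt/spark-2.4.0-bin-hadoop2.7/data/words_count.txt").cache()
    val basics = lines.filter(_.contains("Basics")).count()
    val programming = lines.filter(_.contains("Programming")).count()
    println(s"Lines with Basics: $basics, lines with Programming: $programming")
    spark.stop()
  }
}

Building this against the spark-sql 2.4.0 dependency (scope provided) would give a jar that spark-submit can run exactly as shown above.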

 
