官网
https://spark.apache.org/
# spark版本文档列表
https://spark.apache.org/docs/
# 查看指定版本文档
https://spark.apache.org/docs/2.4.7/
# 配置查看历史日志
https://spark.apache.org/docs/2.3.4/monitoring.html
# 下载安装包
https://spark.apache.org/downloads.html
spark使用yarn资源部署
$SPARK_HOME/conf/spark-defaults.conf
spark.eventLog.enabled true
spark.eventLog.dir hdfs://node1:9000/sparkYarnLogDir
# HDFS上的目录sparkYarnLogDir需要提前存在
spark.yarn.historyServer.address node1:18080
spark.history.ui.port 18080
配置$SPARK_HOME/conf/spark-env.sh
export JAVA_HOME=/opt/module/jdk1.8.0_144
YARN_CONF_DIR=/opt/module/hadoop/etc/hadoop # hadoop的配置文件地址
# 下面是配置历史日志
export SPARK_HISTORY_OPTS="
-Dspark.history.ui.port=18080
-Dspark.history.fs.logDirectory=hdfs://node1:9000/sparkYarnLogDir
-Dspark.history.retainedApplications=30"
提交应用(最后的5表示SparkPi的任务切片数/并行任务数, 不是线程数)(使用yarn集群执行)
# Submit the SparkPi example to YARN in cluster mode: the driver runs inside
# an ApplicationMaster container on the cluster (fire-and-forget from the client).
# Trailing argument 5 = number of slices (parallel tasks) SparkPi splits the job into.
bin/spark-submit \
  --master yarn \
  --deploy-mode cluster \
  --class org.apache.spark.examples.SparkPi \
  ./examples/jars/spark-examples_2.12-3.0.0.jar 5
提交应用(最后的5表示SparkPi的任务切片数/并行任务数, 不是线程数)(使用client模式执行, 便于本地查看输出, 测试使用)
# Submit the SparkPi example to YARN in client mode: the driver runs in this
# local process, so Pi's result prints to this terminal (handy for testing).
# Trailing argument 5 = number of slices (parallel tasks) SparkPi splits the job into.
bin/spark-submit \
  --master yarn \
  --deploy-mode client \
  --class org.apache.spark.examples.SparkPi \
  ./examples/jars/spark-examples_2.12-3.0.0.jar 5
访问
# hadoop NameNode UI
http://node1:9870
# hadoop ResourceManager UI
http://node1:8088
# spark Master UI
http://node1:8080
# spark Job UI
http://node1:4040
# spark History UI
http://node1:18080