spark 提交任务(指定log4j日志配置)

#!/bin/bash

# --- Java environment ---
export JAVA_HOME=/usr/java/jdk1.8.0_202
export JRE_HOME="${JAVA_HOME}/jre"
export CLASSPATH=".:${JAVA_HOME}/lib:${JRE_HOME}/lib"
export PATH="${JAVA_HOME}/bin:${PATH}"

# --- Hadoop environment (CDH parcel layout) ---
export HADOOP_HOME=/opt/cloudera/parcels/CDH
export PATH="${HADOOP_HOME}/bin:${HADOOP_HOME}/sbin:${PATH}"


# Submit the Spark job on YARN with an explicit log4j configuration.
#
# Fixes vs. the original:
#  - spark.driver.extraJavaOptions was supplied twice; repeated --conf of the
#    same key lets the later value win, so "-XX:+UseG1GC -Dlog4j.debug=true"
#    was silently discarded. All driver JVM options are now merged into one
#    --conf line.
#  - The property is spark.dynamicAllocation.enabled (the original said
#    ".enable"), so dynamic allocation was never actually being disabled.
#
# NOTE(review): spark.storage.memoryFraction / spark.shuffle.memoryFraction are
# legacy (pre-1.6) knobs; on Spark 1.6+ they only take effect with
# spark.memory.useLegacyMode=true — confirm that is intended. Likewise
# spark.yarn.executor.memoryOverhead is the deprecated name for
# spark.executor.memoryOverhead on Spark 2.3+; left unchanged for compatibility.
spark2-submit \
--master yarn \
--num-executors 6 \
--executor-memory 8G \
--executor-cores 2 \
--driver-memory 16G \
--conf spark.default.parallelism=30 \
--conf spark.yarn.executor.memoryOverhead=2048 \
--conf spark.storage.memoryFraction=0.5 \
--conf spark.shuffle.memoryFraction=0.3 \
--conf spark.dynamicAllocation.enabled=false \
--conf "spark.driver.extraJavaOptions=-XX:+UseG1GC -Dlog4j.debug=true -Dlog4j.configuration=file:/xxx/log4j.properties" \
--conf "spark.executor.extraJavaOptions=-Dlog4j.configuration=file:/xxx/log4j.properties" \
--class xx xxx.jar xxx.properties

posted @ 2020-06-16 10:30  南风叶  阅读(2241)  评论(0编辑  收藏  举报