Spark 环境搭建 (Spark environment setup)
vim /etc/profile
# Environment variables appended to /etc/profile for the Spark cluster.
# 'export' is required so that child shells — and the Spark/Hadoop daemons
# launched from them — actually inherit these variables; plain assignments
# in /etc/profile stay local to the login shell.

# Java environment
export JAVA_HOME=/opt/jdk1.8.0_45
export JAVA_JRE=${JAVA_HOME}/jre
export CLASS_PATH=${JAVA_HOME}/lib:${JAVA_JRE}/lib
export PATH=$PATH:${JAVA_HOME}/bin

# Scala environment
export PATH=$PATH:/opt/scala-2.11.6/bin

# Spark environment
export SPARK_HOME=/opt/spark-1.4.0-bin-hadoop2.6
export PATH=$PATH:${SPARK_HOME}/bin

# Hadoop environment
export HADOOP_HOME=/opt/hadoop-2.7.0
export PATH=$PATH:${HADOOP_HOME}/bin
vim /opt/spark-1.4.0-bin-hadoop2.6/conf/slaves
# A Spark Worker will be started on each of the machines listed below.
# NOTE(review): the master host is also listed as a Worker here — fine for
# a small test cluster, but consider removing 'master' on production setups.
master
slave1
slave2
# 'localhost' (the single-machine default entry) is commented out in favour
# of the three cluster hosts above.
#localhost
vim /opt/spark-1.4.0-bin-hadoop2.6/conf/spark-env.sh
# conf/spark-env.sh — environment for the standalone Spark daemons.

# BUG FIX: SCALA_HOME previously pointed at the Spark installation
# directory; it must point at the Scala installation (consistent with the
# /opt/scala-2.11.6 PATH entry in /etc/profile).
export SCALA_HOME=/opt/scala-2.11.6
export JAVA_HOME=/opt/jdk1.8.0_45

# Address the standalone Master binds to (Spark 1.x name;
# renamed SPARK_MASTER_HOST in Spark 2.x+).
export SPARK_MASTER_IP=10.211.55.7

# Total memory each Worker may hand out to executors on its machine.
export SPARK_WORKER_MEMORY=1024m

# Default master URL picked up by spark-shell.
# NOTE(review): Spark reads the upper-case MASTER variable; the original
# lower-case 'master' is kept as well in case local scripts reference it.
export MASTER=spark://10.211.55.7:7077
export master=spark://10.211.55.7:7077
