Configuring Spark on Ubuntu

Files to download:

JDK: jdk-8u181-linux-x64.tar.gz

Scala: scala-2.11.8.tgz

Spark: spark-2.3.1-bin-hadoop2.7.tgz

Anaconda: Anaconda3-4.3.1-Linux-x86_64.sh
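To fetch the archives from the command line, a sketch is below. The download URLs are assumptions based on the usual archive mirrors and may have moved, so verify them before use; the Oracle JDK must be downloaded manually from Oracle's site after accepting the license.

cd ~/下载
wget https://archive.apache.org/dist/spark/spark-2.3.1/spark-2.3.1-bin-hadoop2.7.tgz
wget https://downloads.lightbend.com/scala/2.11.8/scala-2.11.8.tgz
wget https://repo.anaconda.com/archive/Anaconda3-4.3.1-Linux-x86_64.sh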

 

Install the JDK, Scala, and Spark:

sudo mkdir /usr/lib/jdk
sudo mkdir /usr/lib/scala
sudo mkdir /usr/lib/spark
cd ~/下载        # Ubuntu's Downloads folder on a Chinese locale; use ~/Downloads on an English-locale system
sudo tar -zxvf jdk-8u181-linux-x64.tar.gz -C /usr/lib/jdk
sudo tar -zxvf scala-2.11.8.tgz -C /usr/lib/scala
sudo tar -zxvf spark-2.3.1-bin-hadoop2.7.tgz -C /usr/lib/spark
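The download list also includes Anaconda, which provides the ipython used as the PySpark driver shell below. A minimal install sketch, assuming you accept the default install path (~/anaconda3) and let the installer prepend it to PATH in ~/.bashrc:

bash Anaconda3-4.3.1-Linux-x86_64.sh
source ~/.bashrc        # pick up the new PATH so that python/ipython resolve to Anaconda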
sudo vim /etc/profile        # append the lines below, then save

# JDK
export JAVA_HOME=/usr/lib/jdk/jdk1.8.0_181
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:$PATH
# Scala
export SCALA_HOME=/usr/lib/scala/scala-2.11.8
export PATH=${SCALA_HOME}/bin:$PATH
# Spark
export SPARK_HOME=/usr/lib/spark/spark-2.3.1-bin-hadoop2.7
export PATH=${SPARK_HOME}/bin:$PATH
# PySpark: put Spark's Python package on PYTHONPATH and use IPython as the driver shell
export PYTHONPATH=${SPARK_HOME}/python:$PYTHONPATH
export PYSPARK_DRIVER_PYTHON="ipython"
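After saving /etc/profile, reload it and confirm each tool resolves to the versions installed above:

source /etc/profile
java -version             # expect java version "1.8.0_181"
scala -version            # expect Scala code runner version 2.11.8
spark-shell --version     # expect Spark 2.3.1 built with Scala 2.11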

Finally, extract the bundled py4j sources so the Python API can find them:

cd /usr/lib/spark/spark-2.3.1-bin-hadoop2.7/python/lib
sudo unzip -d ../ ./py4j-0.10.7-src.zip        # places the py4j package under $SPARK_HOME/python, which is on PYTHONPATH
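A quick smoke test for the Python side, assuming the layout above: py4j and pyspark should both be importable, and pyspark should open an IPython-driven shell.

python -c "import py4j, pyspark; print(pyspark.__version__)"    # expect 2.3.1
pyspark        # IPython shell with a SparkContext available as sc; exit with Ctrl-D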
 
 
 
 