Spark 环境安装 (Spark environment installation)
第一关：安装 Scala (Step 1: install Scala)
# --- Step 1: install Scala 2.12.7 under /app ---
mkdir /app                          # install root; NOTE(review): 'mkdir -p' would be idempotent — confirm /app does not pre-exist
cd /opt                             # the Scala archive is staged under /opt
tar -zxvf scala-2.12.7.tgz -C /app  # unpack to /app/scala-2.12.7
# The next three lines are the content appended inside /etc/profile via the
# vi session — they are typed into the editor, not run as shell commands.
vi /etc/profile
#set scala
SCALA_HOME=/app/scala-2.12.7
export PATH=$PATH:$SCALA_HOME/bin
source /etc/profile                 # reload the profile so the updated PATH takes effect in this shell
scala -version                      # verify the installation (should report 2.12.7)
第二关：安装并启动 Spark (Step 2: install and start Spark)
# --- Step 2: install Spark 2.2.2 (Hadoop 2.7 build) and start the standalone cluster ---
tar -zxvf spark-2.2.2-bin-hadoop2.7.tgz -C /app   # unpack to /app/spark-2.2.2-bin-hadoop2.7
# The next three lines are the content appended inside /etc/profile via the
# vim session — they are typed into the editor, not run as shell commands.
vim /etc/profile
# set spark environment
SPARK_HOME=/app/spark-2.2.2-bin-hadoop2.7
export PATH=$PATH:$SPARK_HOME/bin
source /etc/profile                               # reload the profile so spark binaries are on PATH
cd /app/spark-2.2.2-bin-hadoop2.7/conf
cp spark-env.sh.template spark-env.sh             # create an editable config from the shipped template
# The export lines below are the content added inside spark-env.sh via the
# vim session, not shell commands. SCALA_HOME matches the Step 1 install path.
vim spark-env.sh
export JAVA_HOME=/usr/lib/jvm/jdk1.8.0_111
export SCALA_HOME=/app/scala-2.12.7
export HADOOP_HOME=/usr/local/hadoop/
export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop
export SPARK_MASTER_IP=evassh-9262177 # machine_name: set according to your own hostname
export SPARK_LOCAL_IP=evassh-9262177 # machine_name: set according to your own hostname
cd /app/spark-2.2.2-bin-hadoop2.7
./sbin/start-all.sh                               # launch the standalone master and workers