1.下载Spark安装包
# Download the Spark 3.1.1 binary distribution built against Hadoop 2.7.
# The original URL pointed at the directory listing, not the tarball itself.
cd /export/softwares
wget https://archive.apache.org/dist/spark/spark-3.1.1/spark-3.1.1-bin-hadoop2.7.tgz
下载完成后,将Spark安装包上传到/opt目录中
# Extract into /opt so the path matches the later chown/ln steps
# (the tarball was downloaded to /export/softwares, but all subsequent
# commands operate on /opt/spark-3.1.1-bin-hadoop2.7).
tar -zxvf spark-3.1.1-bin-hadoop2.7.tgz -C /opt
# Verify the extracted directory. 'll' is an interactive alias and is not
# available in scripts; use 'ls -l' instead.
ls -l /opt
2.将文件属主/属组从UID 1000改为root
# Change owner AND group from UID/GID 1000 to root in a single recursive
# pass (chown owner:group replaces the separate chown + chgrp calls).
chown -R root:root /opt/spark-3.1.1-bin-hadoop2.7
3.创建符号链接(便于统一路径引用)
ln -s /opt/spark-3.1.1-bin-hadoop2.7 /opt/spark
4.配置spark-env.sh
# Create spark-env.sh from the shipped template. Use the absolute path so
# this step does not depend on the current working directory.
cd /opt/spark/conf
cp spark-env.sh.template spark-env.sh
# Append the settings below to spark-env.sh (e.g. with: vi spark-env.sh)
# NOTE(review): Spark 3.1.1 is built for Scala 2.12; verify that Scala
# 2.13.5 is actually compatible with this deployment.
export SCALA_HOME=/usr/local/scala/scala-2.13.5
export JAVA_HOME=/usr/local/java/jdk1.8.0_162
# SPARK_MASTER_IP is deprecated since Spark 2.0; Spark 3.x reads
# SPARK_MASTER_HOST for the standalone master bind address.
export SPARK_MASTER_HOST=192.168.109.137
export SPARK_HOME=/opt/spark
export HADOOP_HOME=/opt/hadoop
# Derive the conf dir from HADOOP_HOME so the two cannot drift apart
# (the original hard-coded /opt/hadoop-2.6.4, contradicting HADOOP_HOME).
export HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop