1.下载spark2.4.4及解压文件
wget https://mirrors.tuna.tsinghua.edu.cn/apache/spark/spark-2.4.4/spark-2.4.4-bin-without-hadoop-scala-2.12.tgz
# The tarball lands in the current directory (not in a spark-2.4.4/ subdir).
# The archive contains a top-level spark-2.4.4-bin-without-hadoop-scala-2.12/
# directory; strip it so SPARK_HOME can simply be /usr/local/spark.
mkdir -p /usr/local/spark
tar -zxvf spark-2.4.4-bin-without-hadoop-scala-2.12.tgz -C /usr/local/spark --strip-components=1
2.配置spark环境变量
vim ~/.bash_profile
# Add the Spark environment variables:
export SPARK_HOME=/usr/local/spark
export PATH=$PATH:$SPARK_HOME/bin
# Reload the profile so the variables take effect in the current shell;
# without this, spark commands are not on PATH until the next login.
source ~/.bash_profile
3.配置spark
# Enter Spark's conf directory (e.g. /usr/local/spark/conf) first.
cp spark-env.sh.template spark-env.sh
vim spark-env.sh
# Add the following to spark-env.sh:
export SPARK_MASTER_IP=192.168.100.200
export HADOOP_HOME=/usr/local/hadoop
export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop
export SPARK_DIST_CLASSPATH=$(/usr/local/hadoop/bin/hadoop classpath) # required for the "without-hadoop" build; startup errors out otherwise
# Edit the slaves file (worker host list, one hostname per line):
cp slaves.template slaves
vim slaves
# Add the following lines (these are file contents, not shell commands):
node2
node3
4.将spark复制到从机
# Copy the configured Spark directory to each worker node in turn.
for worker in node2 node3; do
  scp -r /usr/local/spark "root@${worker}:/usr/local/"
done
5.启动spark
# Enter the Spark installation directory first; the relative sbin/ path is
# deliberate — Hadoop also ships a start-all.sh, so do not rely on PATH here.
sbin/start-all.sh
【备注】本文的Spark配置均基于已经安装并配置完成的Hadoop集群环境;Hadoop集群的搭建步骤可参考相关的Hadoop安装配置文档。