1. cd /bigdata/spark-3.0.1-bin-hadoop3.2/conf/
2. cp spark-env.sh.template spark-env.sh
3. vi spark-env.sh
# spark-env.sh — environment sourced by the Spark standalone daemons on startup.
# NOTE(review): Spark 3.0.1 binary releases are built against Scala 2.12 — confirm
# that pointing SCALA_HOME at a 2.13 install is intended.
export SCALA_HOME=/usr/local/scala-2.13.1
export JAVA_HOME=/usr/local/jdk1.8.0_221
# NOTE(review): the Spark build used throughout this document is the hadoop3.2
# variant, but HADOOP_HOME points at Hadoop 2.6.5 — verify the versions match.
export HADOOP_HOME=/usr/local/hadoop/hadoop-2.6.5
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
# SPARK_MASTER_IP was deprecated in Spark 2.x; Spark 3.x reads SPARK_MASTER_HOST.
SPARK_MASTER_HOST=master
# Scratch-space directory for shuffle/spill files. The original value pointed at
# an unrelated spark-2.4.4-bin-hadoop2.6 tree; use the 3.0.1 install dir instead.
SPARK_LOCAL_DIRS=/bigdata/spark-3.0.1-bin-hadoop3.2
# Heap size for the driver process.
SPARK_DRIVER_MEMORY=1G
4. cp slaves.template slaves
5. vi slaves
slave1
slave2
6. vi /etc/profile
# /etc/profile additions — put the Scala and Spark binaries on PATH for all users.
# (The original PATH lines were garbled by extraction; reconstructed as the
# conventional PATH=$PATH:$X_HOME/bin form, and exported so child shells see them.)
SCALA_HOME=/usr/local/scala-2.13.1
PATH=$PATH:$SCALA_HOME/bin
SPARK_HOME=/bigdata/spark-3.0.1-bin-hadoop3.2
PATH=$PATH:$SPARK_HOME/bin
export SCALA_HOME SPARK_HOME PATH
7. 将 Spark 安装目录复制到各从节点（slave1、slave2），并在从节点上同样修改 /etc/profile 环境变量后执行 source /etc/profile 使其生效