1.解压spark-2.4.4-bin-hadoop2.7.tgz包到module
[root@hadoop101 software]# tar -xvf spark-2.4.4-bin-hadoop2.7.tgz -C /opt/module/
2.进入spark的conf里面
重命名 spark 安装目录:
[root@hadoop101 module]# mv spark-2.4.4-bin-hadoop2.7 spark-2.4.4-hadoop2.7
进入文件里面:
[root@hadoop101 module]# cd spark-2.4.4-hadoop2.7/conf/
[root@hadoop101 conf]# cp slaves.template slaves
[root@hadoop101 conf]# cp spark-env.sh.template spark-env.sh
3.配置文件
spark-env.sh
# spark-env.sh — environment settings for Spark standalone mode
export JAVA_HOME=/opt/module/jdk1.8.0_221
export HADOOP_HOME=/opt/module/hadoop-2.9.2
# SPARK_MASTER_IP is deprecated since Spark 2.x; SPARK_MASTER_HOST is the
# supported name for binding the standalone master.
export SPARK_MASTER_HOST=192.168.10.101
# Fixed typo: was SPARK_WORKED_MEMORY (silently ignored by Spark).
# SPARK_WORKER_MEMORY is the total memory each worker may hand to executors.
export SPARK_WORKER_MEMORY=1g
export HADOOP_CONF_DIR=/opt/module/hadoop-2.9.2/etc/hadoop
# Put Hadoop's jars on Spark's classpath via `hadoop classpath`.
export SPARK_DIST_CLASSPATH=$(/opt/module/hadoop-2.9.2/bin/hadoop classpath)
slaves(只用写hadoop102和hadoop103)
hadoop102
hadoop103
4.配置环境变量
vi ~/.bash_profile
内容如下:(添加 SPARK_HOME 环境变量;修改保存后执行 source ~/.bash_profile 使配置生效)
# ~/.bash_profile — per-user environment.
# All *_HOME variables are defined BEFORE they are referenced in PATH.
# (The original defined ZK_HOME after the PATH line, so $ZK_HOME/bin
# expanded to an empty string and zookeeper's bin dir was never on PATH.)
JAVA_HOME=/opt/module/jdk1.8.0_221
HADOOP_HOME=/opt/module/hadoop-2.9.2
SPARK_HOME=/opt/module/spark-2.4.4-hadoop2.7
HIVE_HOME=/opt/module/apache-hive-2.3.6-bin
HBASE_HOME=/opt/module/hbase-0.98.17-hadoop2
ZK_HOME=/opt/module/zookeeper-3.4.7
CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
PATH=$PATH:$HOME/bin:$ZK_HOME/bin:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HBASE_HOME/bin:$SPARK_HOME/bin
# Single consolidated export (HBASE_HOME was previously exported on its own
# line but missing from this list).
export PATH JAVA_HOME HIVE_HOME CLASSPATH HADOOP_HOME ZK_HOME SPARK_HOME HBASE_HOME
5.scp到另外两个节点上面
[root@hadoop101 module]# scp -r spark-2.4.4-hadoop2.7 root@hadoop102:/opt/module/
[root@hadoop101 module]# scp -r spark-2.4.4-hadoop2.7 root@hadoop103:/opt/module/
6.进入spark的sbin目录下面/opt/module/spark-2.4.4-hadoop2.7/sbin
[root@hadoop101 sbin]# ./start-all.sh
hadoop101里面
hadoop102里面
hadoop103里面
启动成功后,浏览器访问 Spark Master Web UI:http://192.168.10.101:8080