Spark 3 Installation and Deployment
1. Download and extract the release package
https://archive.apache.org/dist/spark
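For example, the 3.0.2 build used in the rest of this guide can be fetched and unpacked like this (the archive file name and the /home/hadoop/apps target directory are taken from the paths used in the later steps; adjust them for a different version):
cd /home/hadoop/apps
wget https://archive.apache.org/dist/spark/spark-3.0.2/spark-3.0.2-bin-hadoop3.2.tgz
tar -zxvf spark-3.0.2-bin-hadoop3.2.tgz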
2. In the conf directory, rename spark-env.sh.template to spark-env.sh and add the following configuration
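For example (the install path matches SPARK_HOME as set in step 3):
cd /home/hadoop/apps/spark-3.0.2-bin-hadoop3.2/conf
mv spark-env.sh.template spark-env.sh    # or use cp to keep the original template
The configuration to add to spark-env.sh: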
export JAVA_HOME=/opt/jdk1.8.0_281
# Add when NOT using master HA (uncomment the line below)
#export SPARK_MASTER_HOST=node03
export HADOOP_HOME=/home/hadoop/apps/hadoop-3.2.1
export HADOOP_CONF_DIR=/home/hadoop/apps/hadoop-3.2.1/etc/hadoop
export SPARK_WORKER_MEMORY=500m
export SPARK_WORKER_CORES=1
# Add when using master HA (ZooKeeper-based recovery)
export SPARK_DAEMON_JAVA_OPTS="-Dspark.deploy.recoveryMode=ZOOKEEPER -Dspark.deploy.zookeeper.url=node01:2181,node02:2181,node03:2181 -Dspark.deploy.zookeeper.dir=/spark"
3. Configure environment variables
vi ~/.bashrc
export SPARK_HOME=/home/hadoop/apps/spark-3.0.2-bin-hadoop3.2
export PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin
source ~/.bashrc
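A quick check that the variables took effect (standard Spark CLI checks, nothing specific to this cluster):
echo $SPARK_HOME         # should print /home/hadoop/apps/spark-3.0.2-bin-hadoop3.2
spark-submit --version   # should report Spark version 3.0.2 if PATH is set correctly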
4. Startup
# Run on the master node to start the master and all workers
# (using the full sbin path avoids picking up Hadoop's start-all.sh by mistake)
$SPARK_HOME/sbin/start-all.sh
# Run on the standby master node to start the standby master
$SPARK_HOME/sbin/start-master.sh
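A simple sanity check after startup (a sketch; node01/node03 as the master hosts and the default ports 7077/8080 are assumptions based on the ZooKeeper configuration above):
jps                      # expect a Master process on the master nodes and Worker on the worker nodes
# The master web UI listens on port 8080 by default; the active master shows ALIVE, the standby shows STANDBY
# e.g. http://node01:8080 and http://node03:8080
# Submit the bundled SparkPi example against the HA master list to verify job execution
spark-submit --class org.apache.spark.examples.SparkPi \
  --master spark://node01:7077,node03:7077 \
  $SPARK_HOME/examples/jars/spark-examples_2.12-3.0.2.jar 100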