1 环境变量配置(.bashrc(普通用户模式))
# Spark environment (append to ~/.bashrc for the normal user)
export SPARK_HOME="/home/CWH/Spark/spark-3.1.3-bin-without-hadoop"
export PATH="${SPARK_HOME}/bin:${PATH}"
2相关文件配置
----spark-env.sh 文件
# spark-env.sh — per-node Spark daemon settings.
export JAVA_HOME=/home/CWH/jdk1.8.0_271
export SCALA_HOME=/home/CWH/Spark/scala-2.12.10
# Resources offered by each worker daemon.
export SPARK_WORKER_MEMORY=30G
export SPARK_WORKER_CORES=16
export SPARK_WORKER_INSTANCES=1
# NOTE: SPARK_MASTER_IP is deprecated; Spark 2.x+/3.x sbin scripts read
# SPARK_MASTER_HOST to bind the master to a specific address.
export SPARK_MASTER_HOST=10.103.105.76
# "without-hadoop" build: pull Hadoop jars onto Spark's classpath.
export SPARK_DIST_CLASSPATH=$(/home/CWH/hadoop-3.1.3/bin/hadoop classpath)
export HADOOP_CONF_DIR=/home/CWH/hadoop-3.1.3/etc/hadoop
# History server web UI port.
export SPARK_HISTORY_OPTS="-Dspark.history.ui.port=18080"
---- workers 文件配置
westgisB077
westgisB078
westgisB079
westgisB080
3 启动命令
# Start the master, then all workers listed in conf/workers.
# Spark 3.x renamed start-slaves.sh to start-workers.sh (matches the
# "workers" file name used above; the old name is deprecated).
"$SPARK_HOME"/sbin/start-master.sh
"$SPARK_HOME"/sbin/start-workers.sh
4 查看端口开启命令
# List listening TCP/UDP sockets and filter for port 8080.
netstat -tunlp | grep 8080
查看占用端口的进程
# Show the process holding port 8080.
lsof -i :8080