1、下载解压spark
# Unpack the Spark distribution and expose it via a version-agnostic
# "spark" symlink so later steps don't hardcode the version.
cd /home/hadoop/app || exit 1          # guard: don't tar/rm in the wrong dir if cd fails
tar -zxvf spark-3.3.0-bin-hadoop3.tgz
rm -f spark-3.3.0-bin-hadoop3.tgz      # -f: re-running after a previous cleanup is not an error
ln -sfn spark-3.3.0-bin-hadoop3 spark  # -sfn: replace an existing link so the step is idempotent
2、修改spark-env.sh
# Point Spark at the Hadoop client configuration so spark-submit can find
# the YARN ResourceManager and HDFS settings.
cd /home/hadoop/app/spark/conf || exit 1
# Create spark-env.sh from the shipped template on first run (vi is
# interactive and cannot be scripted); then append the required variable.
[ -f spark-env.sh ] || cp spark-env.sh.template spark-env.sh
# No 'export' needed: Spark sources this file with 'set -a'.
echo 'HADOOP_CONF_DIR=/home/hadoop/app/hadoop/etc/hadoop' >> spark-env.sh
3、测试运行
cd /home/hadoop/app/spark
# Smoke-test the install: submit the bundled SparkPi example to YARN.
bin/spark-submit \
  --class org.apache.spark.examples.SparkPi \
  --master yarn \
  examples/jars/spark-examples_2.12-3.3.0.jar 10
http://hadoop01:8088 #打开YARN ResourceManager的Web UI会看到任务执行状态为SUCCEEDED(注意:8088是YARN RM端口,8080是Spark Standalone Master端口,本例提交到YARN应访问8088)