1. Extract the Spark installation archive
tar -zxvf spark-3.0.0-bin-hadoop3.2.tgz -C /opt/module
2. Rename the Spark directory
mv spark-3.0.0-bin-hadoop3.2 spark-yarn
3. Edit Hadoop's yarn-site.xml and add the properties below, which turn off YARN's physical and virtual memory checks
<!-- Do not kill containers for exceeding their physical memory allocation -->
<property>
    <name>yarn.nodemanager.pmem-check-enabled</name>
    <value>false</value>
</property>
<!-- Do not kill containers for exceeding their virtual memory allocation -->
<property>
    <name>yarn.nodemanager.vmem-check-enabled</name>
    <value>false</value>
</property>
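With both checks disabled, the NodeManager will no longer kill Spark containers whose memory usage overshoots the allocation, which JVM off-heap usage often triggers. If you would rather keep the checks on, a gentler alternative is to raise the virtual-to-physical memory ratio instead; a minimal sketch (the value 4 is an illustrative choice, YARN's default is 2.1):
<!-- Alternative to disabling vmem-check-enabled: allow more virtual
     memory per MB of physical memory before YARN intervenes -->
<property>
    <name>yarn.nodemanager.vmem-pmem-ratio</name>
    <value>4</value>
</property>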
4. Distribute the updated yarn-site.xml to the other machines in the cluster (xsync here is a custom cluster-sync script; scp to each node works as well)
xsync yarn-site.xml
5. Edit the spark-yarn/conf/spark-env.sh configuration file (if it does not exist yet, create it from conf/spark-env.sh.template), adding:
export JAVA_HOME=/opt/module/jdk1.8.0_144
export YARN_CONF_DIR=/opt/module/hadoop-3.1.3/etc/hadoop
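Spark only needs this variable to locate the YARN client configuration; per the Spark documentation, HADOOP_CONF_DIR works just as well. An equivalent line, assuming the same Hadoop layout:
export HADOOP_CONF_DIR=/opt/module/hadoop-3.1.3/etc/hadoop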
6. Copy Hive's hive-site.xml into the Spark conf directory so Spark SQL can reach the existing Hive metastore
cp /opt/module/hive-3.1.2/conf/hive-site.xml /opt/module/spark-yarn/conf/
7. Copy the MySQL JDBC driver and Hadoop's LZO jar into the Spark jars directory
cp /opt/module/hive-3.1.2/lib/mysql-connector-java-5.1.27-bin.jar /opt/module/spark-yarn/jars/
cp /opt/module/hadoop-3.1.3/share/hadoop/common/hadoop-lzo-0.4.20.jar /opt/module/spark-yarn/jars/
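A quick sanity check that both jars now sit where Spark will pick them up:
ls /opt/module/spark-yarn/jars | grep -Ei 'mysql|lzo'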
8. Restart the Hadoop cluster so the new yarn-site.xml takes effect (run from the Hadoop home directory; if the cluster is still running, stop it first with sbin/stop-yarn.sh and sbin/stop-dfs.sh)
sbin/start-dfs.sh
sbin/start-yarn.sh
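With HDFS and YARN back up, a SparkPi submission from the spark-yarn directory is a common smoke test for the whole setup (the examples jar name below is the one shipped with spark-3.0.0-bin-hadoop3.2), and the Hive wiring from step 6 can be checked with spark-sql:
bin/spark-submit \
--class org.apache.spark.examples.SparkPi \
--master yarn \
--deploy-mode cluster \
examples/jars/spark-examples_2.12-3.0.0.jar \
10
bin/spark-sql --master yarn -e "show databases;"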