This article is based on the following Alibaba Cloud community post:
https://developer.aliyun.com/article/72228
Download from the official website.
I am using Hive 4.0.0 alpha 2
Spark 3.3.1
Hadoop 3.3.1
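A typical way to fetch and unpack the Spark 3.3.1 build for Hadoop 3 from the Apache archive is sketched below; the mirror URL, package name and install directory (/opt here) are assumptions on my part, adjust them to your environment.
wget https://archive.apache.org/dist/spark/spark-3.3.1/spark-3.3.1-bin-hadoop3.tgz
tar -zxvf spark-3.3.1-bin-hadoop3.tgz -C /opt
export SPARK_HOME=/opt/spark-3.3.1-bin-hadoop3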
Modify the configuration files under the conf directory so that the files listed below contain the contents shown.
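Spark ships a .template file for each of these under conf; one common way to create the three files before editing them (a sketch, run from the Spark installation directory) is:
cd $SPARK_HOME/conf
cp spark-env.sh.template spark-env.sh
cp spark-defaults.conf.template spark-defaults.conf
cp workers.template workers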
spark-env.sh
# JAVA_HOME and HADOOP_HOME below are taken from the environment;
# if they are not already exported, replace them with the actual installation paths
export JAVA_HOME=$JAVA_HOME
#export SCALA_HOME=/root/scala
export HADOOP_HOME=$HADOOP_HOME
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export YARN_CONF_DIR=$HADOOP_HOME/etc/hadoop
export SPARK_LAUNCH_WITH_SCALA=0
export SPARK_WORKER_MEMORY=1g
export SPARK_DRIVER_MEMORY=1g
# SPARK_MASTER_IP is deprecated since Spark 2.0; SPARK_MASTER_HOST is the current name
export SPARK_MASTER_HOST=ha-01
export SPARK_LIBRARY_PATH=$SPARK_HOME/jars
export SPARK_MASTER_WEBUI_PORT=18080
export SPARK_WORKER_DIR=$SPARK_HOME/work
export SPARK_MASTER_PORT=7077
export SPARK_WORKER_PORT=7078
export SPARK_LOG_DIR=$SPARK_HOME/log
# no single quotes here, otherwise $SPARK_HOME would not be expanded
export SPARK_PID_DIR=$SPARK_HOME/run
spark-defaults.conf
# yarn-cluster as a master URL is deprecated since Spark 2.0;
# use master yarn together with the cluster deploy mode
spark.master yarn
spark.submit.deployMode cluster
# spark-defaults.conf does not expand shell variables, so replace $SPARK_HOME with the real installation path
spark.home $SPARK_HOME
#spark.eventLog.enabled true
spark.serializer org.apache.spark.serializer.KryoSerializer
spark.executor.memory 1g
spark.driver.memory 1g
spark.executor.extraJavaOptions -XX:+PrintGCDetails -Dkey=value -Dnumbers="one two three"
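With spark.master and the deploy mode coming from spark-defaults.conf, a quick way to verify the YARN side is to submit the bundled SparkPi example; the examples jar name below assumes the default Scala 2.12 build of Spark 3.3.1.
$SPARK_HOME/bin/spark-submit \
  --class org.apache.spark.examples.SparkPi \
  $SPARK_HOME/examples/jars/spark-examples_2.12-3.3.1.jar 10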
workers
ha-01
ha-02
ha-03
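The workers file is only read by the standalone start scripts on the master node, and the same conf directory must exist on every node. A minimal sketch, assuming Spark is installed at the same path on ha-01/ha-02/ha-03 and passwordless SSH is set up:
scp -r $SPARK_HOME/conf ha-02:$SPARK_HOME/
scp -r $SPARK_HOME/conf ha-03:$SPARK_HOME/
$SPARK_HOME/sbin/start-all.sh
After that, the master web UI should be reachable at http://ha-01:18080 (the SPARK_MASTER_WEBUI_PORT set in spark-env.sh above).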