一、History Server配置
### --- History Server
~~~ # 配置服务的history server:spark-defaults.conf
[root@hadoop01 ~]# vim $SPARK_HOME/conf/spark-defaults.conf
# history server
spark.master spark://hadoop01:7077
spark.eventLog.enabled true
spark.eventLog.dir hdfs://hadoop01:9000/spark-eventlog
spark.eventLog.compress true
spark.serializer org.apache.spark.serializer.KryoSerializer
spark.driver.memory 512m
~~~ # 配置服务的history server:spark-env.sh
[root@hadoop01 ~]# vim $SPARK_HOME/conf/spark-env.sh
export JAVA_HOME=/opt/yanqi/servers/jdk1.8.0_231
export HADOOP_HOME=/opt/yanqi/servers/hadoop-2.9.2
export HADOOP_CONF_DIR=/opt/yanqi/servers/hadoop-2.9.2/etc/hadoop
export SPARK_DIST_CLASSPATH=$(/opt/yanqi/servers/hadoop-2.9.2/bin/hadoop classpath)
export SPARK_MASTER_HOST=hadoop01
export SPARK_MASTER_PORT=7077
export SPARK_WORKER_CORES=1
export SPARK_WORKER_MEMORY=1g
export SPARK_HISTORY_OPTS="-Dspark.history.ui.port=18080 -Dspark.history.retainedApplications=50 -Dspark.history.fs.logDirectory=hdfs://hadoop01:9000/spark-eventlog"
~~~ # 发送到其它节点
[root@hadoop01 ~]# rsync-script $SPARK_HOME/conf/spark-defaults.conf
[root@hadoop01 ~]# rsync-script $SPARK_HOME/conf/spark-env.sh
[root@hadoop01 ~]# stop-all-spark.sh
[root@hadoop01 ~]# start-all-spark.sh
~~~ # 启动日志服务
[root@hadoop01 ~]# start-history-server.sh
[root@hadoop01 ~]# jps
HistoryServer
二、web端地址:http://hadoop01:18080/
![](https://i-blog.csdnimg.cn/blog_migrate/47d2f5a566435a63352569c77f6e0b49.png)