spark.eventLog.enabled true
spark.eventLog.dir hdfs://bigdata01:8020/spark-logs
export SPARK_HISTORY_OPTS="-Dspark.history.ui.port=18080 -Dspark.history.retainedApplications=3 -Dspark.history.fs.logDirectory=hdfs://bigdata01:8020/spark-logs"
export SPARK_DAEMON_JAVA_OPTS="-Dspark.deploy.recoveryMode=ZOOKEEPER -Dspark.deploy.zookeeper.url=bigdata01:2181,bigdata02:2181,bigdata03:2181 -Dspark.deploy.zookeeper.dir=/spark"
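Before the history server can use these settings, the event-log directory has to exist in HDFS, and the history server must be started explicitly. A minimal sketch of those two steps, assuming Spark is installed under /opt/spark-2.4.0-bin (the path is an assumption; adjust it to your installation):

hdfs dfs -mkdir -p /spark-logs                        # create the log directory referenced by spark.eventLog.dir
/opt/spark-2.4.0-bin/sbin/start-history-server.sh     # serves completed applications at http://bigdata01:18080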
JAVA_HOME=/usr/java/default/
SPARK_MASTER_HOST=bigdata01
SPARK_MASTER_PORT=7077
SPARK_WORKER_MEMORY=1g
SPARK_WORKER_CORES=1
SPARK_HISTORY_OPTS="-Dspark.history.ui.port=18080 -Dspark.history.retainedApplications=3 -Dspark.history.fs.logDirectory=hdfs://bigdata01:8020/spark-logs"
SPARK_DAEMON_JAVA_OPTS="-Dspark.deploy.recoveryMode=ZOOKEEPER -Dspark.deploy.zookeeper.url=bigdata01:2181,bigdata02:2181,bigdata03:2181 -Dspark.deploy.zookeeper.dir=/spark"
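With spark.deploy.recoveryMode=ZOOKEEPER, high availability comes from running a second master that stays in standby until ZooKeeper promotes it. A rough startup sequence, assuming the same installation path on every node and passwordless SSH between them (both assumptions, not stated above):

scp conf/spark-env.sh conf/spark-defaults.conf bigdata02:/opt/spark-2.4.0-bin/conf/    # repeat for bigdata03; the path is hypothetical
sbin/start-all.sh                                        # on bigdata01: starts the active master plus the workers listed in conf/slaves
ssh bigdata02 /opt/spark-2.4.0-bin/sbin/start-master.sh  # standby master; takes over automatically if the master on bigdata01 fails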
Running the code and testing the packaged jar
Click APPLY, then OK.
The first time you package the project, use Build; every time after that, use Rebuild.
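If you would rather build from the command line than from IDEA, a Maven build produces an equivalent jar (this assumes the project is a standard Maven project with the Scala plugin configured, which is not shown above):

mvn clean package    # the jar is written to the target/ directory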
Then I put the built jar directly under the Spark installation directory and ran the following command on Linux:
[root@bigdata01 spark-2.4.0-bin]# spark-submit --master spark://bigdata01:7077 --class com.atguigu.c01.wordcount WordCount.jar
The job ran successfully.
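Because the masters are coordinated through ZooKeeper, the job can also be submitted against both of them, so it survives a master failover; completed runs then appear in the history server UI. A sketch, assuming the standby master runs on bigdata02 (an assumption):

spark-submit --master spark://bigdata01:7077,bigdata02:7077 --class com.atguigu.c01.wordcount WordCount.jar
# completed applications are listed at http://bigdata01:18080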