1、打包
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>${spark.version}</version>
<!-- 编译环境能用,运行环境不可用(打包时不打进来) -->
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
<version>${spark.version}</version>
<!-- 编译环境能用,运行环境不可用 -->
<scope>provided</scope>
</dependency>
2、上传到生产集群后提交(本地模式验证):
/opt/core/spark-2.3.1-bin-hadoop2.6/bin/spark-submit --class com.jiupaipay.wordcount.WordCount --master local[*] ./sparkwordcount-1.0-SNAPSHOT-jar-with-dependencies.jar
/opt/core/spark-2.3.1-bin-hadoop2.6/bin/spark-submit --queue root.line.pay --class com.jiupaipay.wordcount.WordCount --master yarn --deploy-mode client ./sparkwordcount-1.0-SNAPSHOT-jar-with-dependencies.jar