IDEA安装Spark
- 两种方式:
- 从本地安装的Spark中导入 jar包,File - Project Structure - Libraries - "+" - java - 选择spark目录下jars文件夹
- 通过Maven添加pom依赖
本地安装演示:
pom添加演示:
图文步骤:
1. 本地安装:
- File - Project Structure - Libraries - "+" - java
- 选择spark目录下jars文件夹
2. Maven项目,pom.xml文件中添加Spark依赖,需要联网下载,或者本地库中已经下载好依赖包
<!-- Declare the Spark version once in properties so every Spark artifact stays in sync -->
<properties>
    <spark.version>2.3.3</spark.version>
</properties>
<!-- Spark components; the _2.11 artifact suffix is the Scala binary version and must match across all Spark modules -->
<dependencies>
    <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-core -->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.11</artifactId>
        <version>${spark.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-sql -->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-sql_2.11</artifactId>
        <version>${spark.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-streaming -->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-streaming_2.11</artifactId>
        <version>${spark.version}</version>
        <!-- NOTE(review): removed <scope>provided</scope>. With "provided" the jar is excluded
             from the runtime classpath, so running a streaming job directly inside IDEA fails
             with NoClassDefFoundError. Default (compile) scope matches the other two Spark
             dependencies above. Re-add "provided" only when packaging for spark-submit on a
             cluster that already ships these jars. -->
    </dependency>
</dependencies>