- pom中添加依赖
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.12</artifactId>
<version>3.0.0</version>
</dependency>
- Test 1
package test.wyh.sql
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
object TestSQL {

  /**
   * Entry point: builds a local SparkSession, reads a JSON file into a
   * DataFrame, and prints its contents to stdout.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // Create the SparkSQL runtime environment: local mode using all available cores.
    val sparkSQLConf = new SparkConf().setMaster("local[*]").setAppName("testSparkSQL")
    val sparkSession = SparkSession.builder().config(sparkSQLConf).getOrCreate()
    try {
      // DataFrame: load the JSON file (path is relative to the working directory).
      val df = sparkSession.read.json("raw_data/student.json")
      df.show()
    } finally {
      // Always release the session's resources; the original code leaked the
      // SparkSession by never calling stop().
      sparkSession.stop()
    }
  }
}
创建student.json