/**
 * Reads the classic "50 SQL practice questions" tables (student / score /
 * course / teacher) from a MySQL database over JDBC, then answers the first
 * few questions with Spark DataFrame operations (each answer is kept as a
 * commented-out snippet, matching the original exercise style).
 *
 * NOTE(review): this file was recovered from a scraped blog post. The original
 * had lost its comment markers on the question headings and was truncated in
 * the middle of question 5; the reconstruction points are marked below.
 */
object mysql50 {

  def main(args: Array[String]): Unit = {
    // Local Spark session; "local[*]" uses every available core.
    val spark: SparkSession = SparkSession.builder()
      .appName("ReadJson")
      .master("local[*]")
      .getOrCreate()

    // JDBC connection settings for the exercise database.
    val url = "jdbc:mysql://192.168.192.151:3306/mysql50"
    val user = "root"
    val pwd = "root"
    val driver = "com.mysql.jdbc.Driver"

    val properties = new Properties()
    properties.setProperty("user", user)
    properties.setProperty("password", pwd)
    properties.setProperty("driver", driver)

    import org.apache.spark.sql.functions._
    import spark.implicits._

    // Source tables, loaded as DataFrames.
    val studentDF: DataFrame = spark.read.jdbc(url, "student", properties)
    val scoreDF: DataFrame = spark.read.jdbc(url, "score", properties)
    val courseDF: DataFrame = spark.read.jdbc(url, "course", properties)
    val teacherDF: DataFrame = spark.read.jdbc(url, "teacher", properties)

    // 1. Students (with both course scores) whose course "01" score is HIGHER
    //    than their course "02" score.
    /*scoreDF.as("s1").join(scoreDF.as("s2"), "student_id")
      .filter("s1.course_id=01 and s2.course_id=02 and s1.score>s2.score")
      .join(studentDF, "student_id").show()*/

    // 2. Students (with both course scores) whose course "01" score is LOWER
    //    than their course "02" score.
    /*scoreDF.as("s1").join(scoreDF.as("s2"), "student_id")
      .filter("s1.course_id=01 and s2.course_id=02 and s1.score<s2.score").show()*/

    // 3. Student id, name and average score for students whose average score
    //    is >= 60.
    //scoreDF.as("s1").groupBy("student_id").avg("score").filter($"avg(score)">=60).join(studentDF,"student_id").show()

    // 4. Student id, name and average score for students whose average score
    //    is < 60 (including students with and without scores).
    //    NOTE(review): the original snippet passed the whole predicate as a
    //    column name — $"avg(score)<60" — which would fail at runtime; the
    //    intended form is presumably $"avg(score)" < 60, as in question 3.
    /*scoreDF.as("s1").groupBy("student_id").avg("score").filter($"avg(score)" < 60)
      .join(studentDF, "student_id").show()*/

    // 5. For every student: id, name, number of courses taken, and total score
    //    over all courses.
    //    NOTE(review): the scraped source was truncated mid-statement here
    //    ( studentDF.join(df1," ... ); the join completion below is a
    //    reconstruction — verify against the original post.
    /*val df1: DataFrame = scoreDF.groupBy("student_id").count()
    val df2: DataFrame = scoreDF.groupBy("student_id").sum()
    studentDF.join(df1, "student_id").join(df2, "student_id").show()*/

    // Release Spark resources before exiting.
    spark.stop()
  }
}
// Blog-page scrape residue (not source code): posted 08-17, 484 views,
// CSDN view-count icon: https://csdnimg.cn/release/blogv2/dist/pc/img/readCountWhite.png