// Steps for a typed (class-based) custom UDAF:
// 1. Extend Aggregator
// 2. Implement its abstract methods
// 3. Register the aggregator (convert it to a Column)
// 4. Obtain the result through the typed Dataset
package com.wxx.bigdata.sql03
import org.apache.spark.sql.{Encoders, SparkSession}
import org.apache.spark.sql.expressions.Aggregator
/**
 * Driver that applies the typed custom aggregator `CustomerAvg`
 * (defined elsewhere in this package) to a Dataset[Users] read from JSON.
 */
object CustomerUDAFClassAPP {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("CustomerUDFClassAPP")
      .getOrCreate()

    // Spark infers JSON integer fields as bigint (Long). `Users.age` is a
    // scala.Int, so `.as[Users]` on the raw frame fails with
    // "Cannot up cast `age` from bigint to int as it may truncate".
    // Cast the column down explicitly before converting to the typed Dataset.
    val df = spark.read.json("data/test/user.json")
    val typedDf = df.withColumn("age", df("age").cast("int"))

    import spark.implicits._
    // Convert to a typed Dataset
    val ds = typedDf.as[Users]

    // Turn the aggregator into a column usable in a typed select
    val avgAge = CustomerAvg.toColumn.name("avgAge")

    // Apply the aggregation
    ds.select(avgAge).show()

    spark.stop()
  }
}
//Error seen when `.as[Users]` is applied to the raw JSON DataFrame (age inferred as bigint):
//Exception in thread "main" org.apache.spark.sql.AnalysisException: Cannot up cast `age` from bigint to int as it may truncate
//The type path of the target object is:
//- field (class: "scala.Int", name: "age")
//- root class: "com.wxx.bigdata.sql03.Users"