import java.util
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, ObjectInspectorFactory, StructObjectInspector}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}
/**
 * User-defined functions:
 * UDF:  User-Defined Function — operates on a single row; one value in, one value out.
 * UDAF: User-Defined Aggregation Function — aggregates over multiple rows (sum(), avg(), ...); many in, one out.
 * UDTF: User-Defined Table-Generating Function — takes one row of input and emits multiple rows; one in, many out.
 */
// One row in, many rows out
object UDTF {
  def main(args: Array[String]): Unit = {
    // Build a Hive-enabled SparkSession pointing at the remote metastore.
    val sparkSession: SparkSession = SparkSession.builder()
      .appName("UDTF")
      .master("local[*]")
      .config("hive.metastore.uris", "thrift://192.168.153.101:9083")
      .enableHiveSupport()
      .getOrCreate()
    try {
      val sc: SparkContext = sparkSession.sparkContext
      import sparkSession.implicits._

      // Each input line is expected in the form: id//name//class
      val rdd: RDD[String] = sc.textFile("in/UDTF.txt")
      val rdd2: RDD[(String, String, String)] = rdd
        .map(_.split("//"))
        // Guard the field count before indexing, otherwise a malformed line
        // would throw ArrayIndexOutOfBoundsException inside the task.
        .filter(fields => fields.length >= 3 && fields(1).equals("ls"))
        .map(fields => (fields(0), fields(1), fields(2)))

      val frame: DataFrame = rdd2.toDF("id", "name", "class")
      frame.printSchema()
      frame.show(false)
      frame.createOrReplaceTempView("udtfTable")

      // Register the custom UDTF (implemented by MyUDTF in this file) and apply
      // it to the "class" column, producing one output row per token.
      sparkSession.sql("create temporary function Myudtf as 'kb15.function.MyUDTF'")
      sparkSession.sql("select Myudtf(class) from udtfTable").show(false)
    } finally {
      // Always release driver/executor resources, even if a stage fails.
      sparkSession.stop()
    }
  }
}
class MyUDTF extends GenericUDTF {

  /**
   * Declares this UDTF's output schema: a single string column named "type".
   */
  override def initialize(argOIs: Array[ObjectInspector]): StructObjectInspector = {
    val fieldNames: util.ArrayList[String] = new util.ArrayList[String]()
    val fieldOIs: util.ArrayList[ObjectInspector] = new util.ArrayList[ObjectInspector]()
    fieldNames.add("type")
    fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector)
    ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs)
  }

  /**
   * Splits the single input column on spaces and forwards one row per token.
   *
   * Example: input "Hadoop scala kafka hive hbase Oozie" emits six rows,
   * one word each, in the "type" column declared by initialize().
   */
  override def process(objects: Array[AnyRef]): Unit = {
    // Reuse one output buffer across forward() calls instead of allocating a
    // fresh array per token — the pattern used by Hive's built-in UDTFs
    // (e.g. GenericUDTFExplode); forward() consumes the row before returning.
    val row: Array[String] = new Array[String](1)
    for (token <- objects(0).toString.split(" ")) {
      row(0) = token
      forward(row)
    }
  }

  // No resources to release.
  override def close(): Unit = {}
}
// (removed stray page artifacts "11-09" / "175" — scraping residue, not valid Scala)