import d0708.udtf
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.TableEnvironment
import org.apache.flink.types.Row
/**
  * Demo job: reads a single CSV-like record, registers a UDTF named "udtf",
  * and runs a SQL lateral-table query that splits the third column on '/'.
  */
object b {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val tabEnv = TableEnvironment.getTableEnvironment(env)
    // Register the table function under the name used in the SQL below.
    tabEnv.registerFunction("udtf", new udtf)

    // Sample records exercising different shapes of the third field.
    // Only s4 is fed into the stream below; the rest document test cases.
    val s1 = "dev01,1574944573000,2019/2020"
    val s2 = "dev01,1574944573000,2019/2020/222"
    val s3 = "dev01,1574944573000," // trailing empty field
    val s4 = "dev01,1574944573000,null"

    val ds: DataStream[(String, Long, String)] = env.fromElements(s4)
      .map { line =>
        // limit -1 keeps trailing empty strings so the s3 case still yields 3 fields
        val fields = line.split(",", -1)
        (fields(0), fields(1).toLong, fields(2))
      }
    ds.print("ds")

    import org.apache.flink.table.api.scala._
    tabEnv.registerDataStream("tbl1", ds, 'devId, 'time, 'data)
    // LATERAL TABLE joins each row with the rows emitted by the UDTF for its `data` column.
    val rsTab = tabEnv.sqlQuery("select devId,`time`,`value` from tbl1 , LATERAL TABLE(udtf(data,'/')) as t(`value`) ")
    rsTab.toRetractStream[Row].print("result")
    env.execute()
  }
}
package d0708
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.{TableEnvironment, Types}
import org.apache.flink.table.functions.{ScalarFunction, TableFunction}
import org.apache.flink.types.Row
/**
  * Table function that splits a string on a separator and emits one
  * single-column row per token.
  *
  * NOTE(review): `sep` is passed to `String.split`, so it is interpreted
  * as a regular expression ('/' is safe; characters like '.' are not).
  */
class udtf extends TableFunction[Row] {
  // Output schema: a single STRING column.
  override def getResultType: TypeInformation[Row] = Types.ROW(Types.STRING)

  /**
    * Splits `s` by `sep` and collects one row per token.
    * Emits nothing when `s` is null or empty. (The original code evaluated
    * a bare `null` expression in that branch — a discarded value in a
    * `Unit` method, i.e. a misleading no-op — now replaced by a guard.)
    */
  def eval(s: String, sep: String): Unit = {
    if (s != null && s.nonEmpty) {
      for (token <- s.split(sep)) {
        collect(Row.of(token))
      }
    }
  }
}