# Implementing a PyFlink Table API user-defined table function (UDTF) in Python
from pyflink.table import EnvironmentSettings, TableEnvironment, DataTypes
from pyflink.table.udf import udf, udtf
# 1. Create the TableEnvironment
env_settings = EnvironmentSettings.new_instance().in_streaming_mode().use_blink_planner().build()
table_env = TableEnvironment.create(env_settings)
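# Note (an assumption about the PyFlink version in use): from Flink 1.14 on, the blink
# planner is the only planner and use_blink_planner() no longer exists; there the
# environment would be created roughly as:
#   env_settings = EnvironmentSettings.new_instance().in_streaming_mode().build()
#   table_env = TableEnvironment.create(env_settings)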
# 2. Create the source table (Kafka topic with JSON messages)
table_env.execute_sql("""
CREATE TABLE datagen (
i INT,
j INT,
m VARCHAR
) WITH (
'connector' = 'kafka',
'topic' = 'flink_udftest1',
'properties.bootstrap.servers' = '1.21:9092',
'properties.group.id' = 'test_PrintUdf',
'scan.startup.mode' = 'latest-offset',
'format' = 'json'
)
""")
# Example of a scalar Python UDF, kept here for reference:
# @udf(input_types=[DataTypes.INT(), DataTypes.INT()], result_type=DataTypes.INT())
# def add(i, j):
#     return i + j
# table_env.register_function("add", add)
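# If enabled, the scalar function could then be used on the source columns in SQL,
# e.g. (a sketch, not part of the original job):
#   table_env.sql_query("SELECT add(i, j) FROM datagen")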
@udtf(result_types=[DataTypes.STRING(), DataTypes.STRING()])
def splitTest(s: str):
    # Split the input string on '#' and emit the two parts as one row (m1, m2)
    splits = s.split("#")
    yield splits[0], splits[1]
# Register the Python table function
# (alternative: wrap an undecorated function with udtf() and register the result)
# split = udtf(splitTest, result_types=[DataTypes.STRING(), DataTypes.STRING()])
table_env.register_function("splitStr", splitTest)
# 3. Create the sink table
table_env.execute_sql("""
CREATE TABLE print (
m1 VARCHAR ,
m2 VARCHAR
) WITH (
'connector' = 'print'
)
""")
# 4. Query the source table and run the computation
# Create a Table from the registered source via the Table API:
source_table = table_env.from_path("datagen")
# ...or via a SQL query:
# source_table = table_env.sql_query("SELECT * FROM datagen")
# source_table.execute()
# Alternative registration via the newer API (note: pass the function itself, not a call):
# table_env.create_temporary_function("split", udtf(splitTest, result_types=[DataTypes.STRING(), DataTypes.STRING()]))
# Equivalent queries using the table function in SQL:
# table_env.sql_query("SELECT m1, m2 FROM datagen, LATERAL TABLE(splitStr(m)) as T(m1, m2)")
# Generic example from the Flink docs (table/function names differ from this job):
# table_env.sql_query("SELECT a, word, length FROM MyTable LEFT JOIN LATERAL TABLE(split(a)) as T(word, length) ON TRUE")
# 5. Write the query result into the print sink and submit the job
table_env.sql_update("INSERT INTO print SELECT m1, m2 FROM datagen, LATERAL TABLE(splitStr(m)) as T(m1, m2)")
table_env.execute("job_sum")
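# sql_update()/execute() is the legacy (pre-1.11) submission API; on newer PyFlink versions
# the equivalent would be table_env.execute_sql("INSERT INTO ...").wait()
# (an assumption about the target Flink version).
# Example of what the pipeline does: a Kafka JSON message such as
#   {"i": 1, "j": 2, "m": "hello#world"}
# is split by splitStr on '#', so the print sink emits a row with m1='hello' and
# m2='world' (the exact print format depends on the Flink version).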