Flink流表转换DEMO

该博客展示了如何使用Flink进行实时数据处理,通过创建流处理环境,定义数据源,设置水印策略,进行SQL查询并转换数据,最后将结果输出到目的地。主要涉及Flink的Kafka连接器,JSON解析,以及流表转换操作。
(Summary auto-generated by CSDN.)
package pressure_measurement;

import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.shaded.guava18.com.google.common.collect.Lists;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.StatementSet;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;
import udfdemo.MaketOrderObj;

import java.util.Collections;
import java.util.Date;
import java.util.List;


/**
 * Sample input record — one JSON message consumed from the source topic:
 * {"bill_h":"8001SPPA2110077777","remark_h":"山顶洞穴港","flag_conf":1,"conf_by":"0815","time_conf":"2021-10-26 15:52:22.000","def_by":"0815","time_def":"2021-10-26 15:49:21.000","type_pro":"34","market_channels":"2","pp_no":"1000004301","theme":"基友会","theme_descr":"无","adjtype":"0","bil_d":"8001SPPA2110077777","groupid":"0151","shopid":"15252321","price_sale":"37.5000","price_newpro":"29.9000","time_ok":"2021-10-27 00:00:00.000","time_back":"2021-10-28 23:59:59.000","remark_d":" ","flag_way":"0"}
 *
 * To dump a POJO as JSON for inspection:
 *   ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 *   System.out.println(OBJECT_MAPPER.writeValueAsString(value));
 */
public class QueueOverlayLogic2 {

    public static void main(String[] args) {
        System.setProperty("HADOOP_USER_NAME", "flink");
        //todo 1 init flink-sql environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, settings);
        env.setParallelism(7);


        tEnv.executeSql("CREATE TABLE ods_t_bp_propricechg_d_r (\n" +
                "bill_h          VARCHAR,\n" +
                "shopid          VARCHAR,\n" +
                "remark_h        VARCHAR,\n" +
                "flag_conf       VARCHAR,\n" +
                "conf_by         VARCHAR,\n" +
                "time_conf       VARCHAR,\n" +
                "def_by          VARCHAR,\n" +
                "time_def        VARCHAR,\n" +
                "type_pro    
[NOTE(review): the listing is truncated here. The remainder of the CREATE TABLE
DDL (from the `type_pro` column onward) and the rest of main() were behind the
source page's paywall and were not captured; what followed in the scrape was
CSDN payment-widget boilerplate, not article content.]