FlinkSQL: NoClassDefFoundError ThreadUtils when writing data to Kafka

pom file

flink-connector-debezium:2.2.0
flink-connector-kafka_2.11:1.13.6
flink-connector-mysql-cdc:2.2.0

Error

java.lang.NoClassDefFoundError: org/apache/kafka/common/utils/ThreadUtils
    at com.ververica.cdc.debezium.internal.FlinkOffsetBackingStore.start(FlinkOffsetBackingStore.java:152) ~[flink-connector-debezium-2.2.0.jar:2.2.0]
    at com.ververica.cdc.debezium.internal.FlinkOffsetBackingStore.configure(FlinkOffsetBackingStore.java:71) ~[flink-connector-debezium-2.2.0.jar:2.2.0]
    at io.debezium.embedded.EmbeddedEngine.run(EmbeddedEngine.java:690) [debezium-embedded-1.5.4.Final.jar:1.5.4.Final]
    at io.debezium.embedded.ConvertingEngineBuilderWorker.run(ThreadPoolExecutor.java:617) [?:1.8.0_121]
    at java.lang.Thread.run(Thread.java:745) [?:1.8.0_121]
19:49:50,611 ERROR com.ververica.cdc.debezium.internal.Handover [] - Reporting error:
java.lang.NoClassDefFoundError: org/apache/kafka/common/utils/ThreadUtils
    at com.ververica.cdc.debezium.internal.FlinkOffsetBackingStore.start(FlinkOffsetBackingStore.java:152) ~[flink-connector-debezium-2.2.0.jar:2.2.0]
    at com.ververica.cdc.debezium.internal.FlinkOffsetBackingStore.configure(FlinkOffsetBackingStore.java:71) ~[flink-connector-debezium-2.2.0.jar:2.2.0]
    at io.debezium.embedded.EmbeddedEngine.run(EmbeddedEngine.java:690) [debezium-embedded-1.5.4.Final.jar:1.5.4.Final]
    at ... (log truncated)

Solution

The stack trace shows that org.apache.kafka.common.utils.ThreadUtils cannot be loaded at runtime, which typically means no suitable kafka-clients jar is on the job's classpath. Add the kafka-clients dependency explicitly:

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.7.0</version>
</dependency>
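
To verify the fix, you can check which jar the class is actually loaded from, which also helps when several Kafka versions end up on the classpath. A minimal sketch; the class name KafkaClasspathCheck is just illustrative:

public class KafkaClasspathCheck {
    public static void main(String[] args) {
        try {
            // Resolve the class the connector failed to find
            Class<?> c = Class.forName("org.apache.kafka.common.utils.ThreadUtils");
            // Print the location of the jar that provides it
            System.out.println(c.getProtectionDomain().getCodeSource().getLocation());
        } catch (ClassNotFoundException e) {
            System.out.println("kafka-clients is still missing from the classpath");
        }
    }
}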

Code example

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
public class Testkafka {
    public static void main(String[] args) throws Exception {
        // Local execution environment with the Flink web UI enabled, useful for debugging
        Configuration conf = new Configuration();
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf);
        env.setParallelism(1);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Blink planner in streaming mode (Flink 1.13.x Table API)
        EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().useBlinkPlanner().build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);
        Configuration configuration = tableEnv.getConfig().getConfiguration();
//        configuration.setString("table.exec.mini-batch.enabled", "true"); // enable mini-batch optimization
//        configuration.setString("table.exec.mini-batch.allow-latency", "5 s"); // use 5 seconds to buffer input records
//        configuration.setString("table.exec.mini-batch.size", "5000"); // the maximum number of records can be buffered by each aggregate operator task
        //configuration.setString("table.exec.state.ttl", "60000");

        // Kafka source table: order events read as JSON from topic source1
        String skersker = "create table source2_1 ( \n" +
                "  id BIGINT,\n" +
                "  day_time VARCHAR,\n" +
                "  amnount BIGINT,\n" +
                "  proctime AS PROCTIME ()\n" +
                ")\n" +
                " with ( \n" +
                "   'connector' = 'kafka',\n" +
                "   'topic' = 'source1',\n" +
                "   'properties.bootstrap.servers' = '192.168.50.111:9092,192.168.50.112:9092,192.168.50.113:9092', \n" +
                "   'properties.group.id' = 'flink_gp_test2-1',\n" +
                "   'scan.startup.mode' = 'earliest-offset',\n" +
                "   'format' = 'json',\n" +
                "   'json.fail-on-missing-field' = 'false',\n" +
                "   'json.ignore-parse-errors' = 'true'\n" +
                " )";
        // MySQL CDC source table (registered below but not used in the query)
        String flink_test_mt = "CREATE TABLE flink_test_mt (\n" +
                "  m_type varchar(255),\n" +
                "  m_id varchar(255),\n" +
                "  m_sugg_word varchar(255),\n" +
                "  m_update_time timestamp,\n" +
                "  primary key(m_type)  NOT ENFORCED \n" +
                ") WITH (\n" +
                "  'connector' = 'mysql-cdc',\n" +
                "  'hostname' = '192.168.50.120',\n" +
                "  'port' = '3336',\n" +
                "  'username' = 'root',\n" +
                "  'password' = '123456',\n" +
                "  'database-name' = 'mydb',\n" +
                "  'table-name' = 'flink_test_mt_tmp'\n" +
                ")";
        // Second Kafka source table: coupon events read as JSON from topic source2
        String source2_2 = "create table source2_2 ( \n" +
                "  id BIGINT,\n" +
                "  coupon_amnount BIGINT,\n" +
                "  proctime AS PROCTIME ()\n" +
                ")\n" +
                " with ( \n" +
                "   'connector' = 'kafka',\n" +
                "   'topic' = 'source2',\n" +
                "   'properties.bootstrap.servers' = '192.168.50.111:9092,192.168.50.112:9092,192.168.50.113:9092', \n" +
                "   'properties.group.id' = 'flink_gp_test2-2',\n" +
                "   'scan.startup.mode' = 'earliest-offset',\n" +
                "   'format' = 'json',\n" +
                "   'json.fail-on-missing-field' = 'false',\n" +
                "   'json.ignore-parse-errors' = 'true'\n" +
                " )";
        // Upsert-kafka sink table, keyed by day_time so each aggregate is updated in place
        String sink_kafka = "CREATE TABLE sink (\n" +
                "   day_time string,\n" +
                "   total_gmv bigint,\n" +
                "   PRIMARY KEY (day_time) NOT ENFORCED\n" +
                ") WITH (\n" +
                "  'connector' = 'upsert-kafka',\n" +
                "  'topic' = 'sink',\n" +
                "  'properties.bootstrap.servers' = '192.168.50.111:9092,192.168.50.112:9092,192.168.50.113:9092', \n" +
                "  'key.format' = 'json',\n" +
                "  'value.format' = 'json'\n" +
                ")";

        // Register the tables, then run the aggregation and stream the result into the sink
        tableEnv.executeSql(skersker);
        tableEnv.executeSql(flink_test_mt);
        tableEnv.executeSql(sink_kafka);
        tableEnv.executeSql(source2_2);
        tableEnv.executeSql(" INSERT INTO sink  SELECT \n" +
                "  day_time, \n" +
                "  SUM(amnount - coupon_amnount) AS total_gmv \n" +
                "FROM \n" +
                "  (\n" +
                "    SELECT\n" +
                "      a.day_time as day_time, \n" +
                "      a.amnount as amnount, \n" +
                "      b.coupon_amnount as coupon_amnount \n" +
                "    FROM \n" +
                "      source2_1 as a \n" +
                "      LEFT JOIN source2_2 b on b.id = a.id\n" +
                "  ) \n" +
                "GROUP BY \n" +
                "  day_time").print();


    }
}
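
To smoke-test the job, you can push a couple of JSON records into the source topics with a plain Kafka producer. A minimal sketch, assuming the kafka-clients dependency added above; the values are made up, and the field names deliberately match the DDL, including the "amnount" spelling:

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

public class ProduceTestRecords {
    public static void main(String[] args) {
        Properties props = new Properties();
        // Broker list taken from the DDL above; adjust for your cluster
        props.put("bootstrap.servers", "192.168.50.111:9092,192.168.50.112:9092,192.168.50.113:9092");
        props.put("key.serializer", StringSerializer.class.getName());
        props.put("value.serializer", StringSerializer.class.getName());
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // One order event and one matching coupon event, joined on id
            producer.send(new ProducerRecord<>("source1", "{\"id\":1,\"day_time\":\"2021-07-01\",\"amnount\":100}"));
            producer.send(new ProducerRecord<>("source2", "{\"id\":1,\"coupon_amnount\":10}"));
        }
    }
}

With these two records, the job should emit {"day_time":"2021-07-01","total_gmv":90} to the sink topic.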

