Flink Common Problems Summary

Flink 1.11: No operators defined in streaming topology. Cannot execute.

The code is as follows:

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.FileSystem;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;

/**
 * @ClassName: TableTest3_FileOutput
 * @Description:
 * @Author: wushengran on 2020/11/13 11:54
 * @Version: 1.0
 */
public class TableTest3_FileOutput {
    public static void main(String[] args) throws Exception {
        // 1. Create the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Create a table: connect to an external system and read the data
        // Read from a file
        String filePath = "F:\\project\\flink20210920\\src\\main\\resources\\sensor.txt";
        tableEnv.connect(new FileSystem().path(filePath))
                .withFormat(new Csv())
                .withSchema(new Schema()
                        .field("id", DataTypes.STRING())
                        .field("timestamp", DataTypes.BIGINT())
                        .field("temp", DataTypes.DOUBLE())
                )
                .createTemporaryTable("inputTable");

        Table inputTable = tableEnv.from("inputTable");
//        inputTable.printSchema();
//        tableEnv.toAppendStream(inputTable, Row.class).print();

        // 3. Query transformations
        // 3.1 Table API
        // Simple projection and filter
        Table resultTable = inputTable.select("id, temp")
                .filter("id === 'sensor_6'");

        // Aggregation
        Table aggTable = inputTable.groupBy("id")
                .select("id, id.count as count, temp.avg as avgTemp");

        // 3.2 SQL
        Table sqlResultTable = tableEnv.sqlQuery("select id, temp from inputTable where id = 'sensor_6'");
        Table sqlAggTable = tableEnv.sqlQuery("select id, count(id) as cnt, avg(temp) as avgTemp from inputTable group by id");

        // 4. Write to a file
        // Connect to an external file and register the output table
        String outputPath = "F:\\project\\flink20210920\\src\\main\\resources\\out.txt";
        tableEnv.connect(new FileSystem().path(outputPath))
                .withFormat(new Csv())
                .withSchema(new Schema()
                                .field("id", DataTypes.STRING())
//                        .field("cnt", DataTypes.BIGINT())
                                .field("temperature", DataTypes.DOUBLE())
//                        .field("avgTemp", DataTypes.DOUBLE())
                )
                .createTemporaryTable("outputTable");

//        tableEnv.toRetractStream(aggTable, Row.class).print("agg");
        resultTable.insertInto("outputTable");  // TODO: throws the error. Why? See the analysis below.
//        aggTable.insertInto("outputTable");  // the filesystem sink does not support the update operations an aggregation produces
//        resultTable.executeInsert("outputTable", true);
//        tableEnv.insertInto(resultTable, "outputTable", new StreamQueryConfig());
        env.execute("test");  // fails: the topology contains no DataStream operators
    }
}
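The failure comes from the final env.execute("test"): on Flink 1.11, StreamExecutionEnvironment.execute() only submits DataStream operators, and this pipeline defines none, since both source and sink go through the Table API (the commented-out toAppendStream/toRetractStream calls would have added DataStream operators). A minimal fix that keeps the connect()-style registration above is to submit the insert through the Table API and drop env.execute():

// Table.executeInsert() (added in Flink 1.11) plans and submits
// the job itself, so env.execute() must not be called afterwards.
resultTable.executeInsert("outputTable");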

Following https://blog.csdn.net/wtmdcnm/article/details/117821106 and the description in the official docs, the test code was modified as follows:

// 1. Create the execution environment
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);

StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
tableEnv.executeSql("CREATE TABLE MyTable(\n" +
        "`id` STRING," +
        "`timestamp` BIGINT," +
        "`temp` DOUBLE" +
        ") WITH (\n" +
        "  'connector' = 'filesystem',\n" +
        "  'path' = 'F:\\project\\flink20210920\\src\\main\\resources\\sensor.txt',\n" +
        "  'format' = 'csv'\n" +
        ")");

Table resultTable = tableEnv.sqlQuery("select id, temp from MyTable where id = 'sensor_1'");

tableEnv.executeSql("CREATE TABLE MyTable1(\n" +
        "`id` STRING," +
        "`temp` DOUBLE" +
        ") WITH (\n" +
        "  'connector' = 'filesystem',\n" +
        "  'path' = 'F:\\project\\flink20210920\\src\\main\\resources\\out',\n" +
        "  'format' = 'csv'\n" +
        ")");

// executeInsert() submits the job itself; no env.execute() afterwards
resultTable.executeInsert("MyTable1");

This runs successfully. Note that executeInsert plans and submits the job by itself, so there is no trailing env.execute() here; adding one would fail again with the same "No operators defined" error, because the pipeline still contains no DataStream operators.

please declare primary key for sink table when query contains update/delete record.

"id STRING primary key," + // 聚合类操作必须要指定一个主键

Column 'dt' is NOT NULL, however, a null value is being written into it. You can set job configuration 'table.exec.sink.not-null-enforcer'='drop' to suppress this exception and drop such records silently.

set table.exec.sink.not-null-enforcer=DROP
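The line above is the SQL-client form; from Java the same option can be set on the table configuration. A minimal sketch, assuming the tableEnv from the examples above:

// Silently drop records that would write NULL into a NOT NULL column
// instead of failing the job (the default enforcer behavior is ERROR).
tableEnv.getConfig().getConfiguration()
        .setString("table.exec.sink.not-null-enforcer", "DROP");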

java.lang.ClassCastException: java.lang.Boolean cannot be cast to java.lang.Integer

MySQL-to-Flink data type mapping

[The original post embeds screenshots of the JDBC connector's type-mapping table here.]

Official documentation: https://nightlies.apache.org/flink/flink-docs-release-1.13/zh/docs/connectors/table/jdbc/#%E6%95%B0%E6%8D%AE%E7%B1%BB%E5%9E%8B%E6%98%A0%E5%B0%84
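A common trigger for this exact cast (an assumption about this particular job, but well-documented MySQL driver behavior): TINYINT(1) columns are returned by the MySQL JDBC driver as java.lang.Boolean, so mapping them to TINYINT or INT on the Flink side fails at runtime. Either declare the column as BOOLEAN in the Flink DDL, or append tinyInt1isBit=false to the JDBC URL so the driver returns integers. A sketch with hypothetical table and column names:

tableEnv.executeSql("CREATE TABLE UserFlags (\n" +
        "  id BIGINT,\n" +
        "  is_deleted BOOLEAN,\n" +  // backed by MySQL TINYINT(1)
        "  PRIMARY KEY (id) NOT ENFORCED\n" +
        ") WITH (\n" +
        "  'connector' = 'jdbc',\n" +
        "  'url' = 'jdbc:mysql://localhost:3306/test',\n" +  // hypothetical
        "  'table-name' = 'user_flags'\n" +
        ")");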

Caused by: org.apache.flink.util.SerializedThrowable: Elasticsearch exception [type=circuit_breaking_exception, reason=[parent] Data too large, data for [<http_request>] would be [1003554592/957mb], which is larger than the limit of [986061209/940.3mb], real usage: [1003525192/957mb], new bytes reserved: [29400/28.7kb], usages [request=49320/48.1kb, fielddata=8948/8.7kb, in_flight_requests=35272046/33.6mb, model_inference=0/0b, accounting=2542560/2.4mb]]

The Elasticsearch node is running out of memory: the parent circuit breaker rejects the request because total memory use would exceed the configured limit (~940 MB here, derived from the JVM heap). Increase the ES heap size (-Xms/-Xmx in config/jvm.options), or reduce the write pressure from the sink (smaller bulk size / flush interval).

Caused by: org.elasticsearch.ElasticsearchException: Elasticsearch exception [type=version_conflict_engine_exception, reason=[2021-12-13_01010010121_ooTTjvnAOi7VEHvZ70iXJQegHWqU_17da79dbcc112a-01fefad191971f-20f783a-280800-17da79dbcc2ec-1639292135844]: version conflict, required seqNo [540622], primary term [2]. current document has seqNo [540675] and primary term [2]]

This involves optimistic versus pessimistic locking: Elasticsearch uses optimistic concurrency control, and the write is rejected because the document's sequence number advanced (seqNo 540675 vs the expected 540622) between read and write. Under high concurrency this happens when several writers update the same primary key (document _id) at once; the primary key chosen here is not reasonable and needs to be changed, as sketched below.
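In the Flink SQL Elasticsearch connector the PRIMARY KEY is what becomes the document _id, so the fix is to key the sink on something that parallel writers do not update concurrently. A minimal sketch, assuming Elasticsearch 7 and hypothetical index and field names:

tableEnv.executeSql("CREATE TABLE EsSink (\n" +
        "  event_id STRING,\n" +  // unique per record, so concurrent
        "  cnt BIGINT,\n" +       // writers never hit the same _id
        "  PRIMARY KEY (event_id) NOT ENFORCED\n" +
        ") WITH (\n" +
        "  'connector' = 'elasticsearch-7',\n" +
        "  'hosts' = 'http://localhost:9200',\n" +
        "  'index' = 'sensor_stats'\n" +
        ")");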
