Running a Flink Table API demo locally in IDEA (archive)

This article shows how to generate data with the datagen connector in Apache Flink and send it to a console print sink via the Table API. It also shows how to configure Log4j and SLF4J for log output.

Changelog

2024-02-15 Word-frequency-count demo version (includes viewing the Flink Web UI)

Main code: datagen -> console output

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;

import static org.apache.flink.table.api.Expressions.$;

/**
 * @author 划水小苏
 * @date 2024/02/15 15:29
 * @target:
 **/
public class TestDemo01 {
    public static void main(String[] args) {
        // 1. Use local mode
        Configuration conf = new Configuration();
        // Local port that the Web UI binds to
        conf.setString(RestOptions.BIND_PORT, "8081");
        EnvironmentSettings settings = EnvironmentSettings
                .newInstance()
                .inStreamingMode()
                .withConfiguration(conf)
                //.inBatchMode()
                .build();
        TableEnvironment tableEnv = TableEnvironment.create(settings);
        tableEnv.executeSql("CREATE TABLE datagenSource (\n" +
                "    t1     BIGINT,\n" +
                "    t2     BIGINT,\n" +
                "    t3     STRING\n" +
                ") WITH (\n" +
                "   'connector' = 'datagen',\n" +
                "   'rows-per-second' = '10',\n" +
                "   'fields.t1.min' = '7848',\n" +
                "   'fields.t1.max' = '10000',\n" +
                "   'fields.t2.min' = '2424',\n" +
                "   'fields.t2.max' = '484861',\n" +
                "   'fields.t3.length' = '3'\n" +
                ")");

        tableEnv.executeSql("CREATE TABLE PrintSink (\n" +
                "    p1     STRING,\n" +
                "    p2     BIGINT\n" +
                ") WITH (\n" +
                "   'connector' = 'print'\n" +
                ")");
        Table table1 = tableEnv.from("datagenSource");
//        Table table2 = tableEnv.from("PrintSink");
        Table result = table1.groupBy($("t3"))
                .select($("t3"), $("t3").count().as("amount"));
        result.insertInto("PrintSink").execute();
    }
}
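
Because the query is a grouped aggregation, the print sink receives a changelog (retract) stream rather than plain inserts. The console output looks roughly like the lines below (the values are made up here, since datagen produces random rows; when parallelism is greater than 1, each line is also prefixed with the subtask index):

+I[a1B, 1]
+I[x9Q, 1]
-U[a1B, 1]
+U[a1B, 2]

While the job is running, the Flink Web UI is available at http://localhost:8081, the port bound via RestOptions.BIND_PORT above.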

pom file

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>FlinkStudy</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <flink.version>1.17.2</flink.version>
        <log4j.version>1.2.17</log4j.version>
        <slf4j.version>1.7.25</slf4j.version>
        <slf4j.api.version>1.7.25</slf4j.api.version>
    </properties>
    <dependencies>
        <!-- Required dependencies for running Flink locally; for details see https://nightlies.apache.org/flink/flink-docs-release-1.17/zh/docs/dev/configuration/overview/ -->
        <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-core -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-core</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java</artifactId>
            <version>${flink.version}</version>
        </dependency>

<!--        flink-table-api-java-bridge-->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-clients -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-connector-datagen -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-datagen</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-table-planner -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner_2.12</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <!-- Flink Web UI for local runs -->
        <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-runtime-web -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-runtime-web</artifactId>
            <version>${flink.version}</version>
        </dependency>



        <!-- Dependencies required for running tests -->

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-test-utils</artifactId>
            <version>${flink.version}</version>
            <scope>test</scope>
        </dependency>


        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-test-utils</artifactId>
            <version>${flink.version}</version>
            <scope>test</scope>
        </dependency>

        <!-- Logging output -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>${slf4j.api.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j-impl</artifactId>
            <version>2.9.1</version>
        </dependency>

        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-api</artifactId>
            <version>2.9.1</version>
        </dependency>

        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-core</artifactId>
            <version>2.9.1</version>
        </dependency>


    </dependencies>

</project>

Logging XML

Create a log4j2.xml file under src/main/resources:

<?xml version="1.0" encoding="UTF-8"?>
<configuration monitorInterval="5">
    <Properties>
        <property name="LOG_PATTERN" value="%date{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n" />
        <property name="LOG_LEVEL" value="INFO" />
    </Properties>

    <appenders>
        <console name="Console" target="SYSTEM_OUT">
            <PatternLayout pattern="${LOG_PATTERN}"/>
            <ThresholdFilter level="${LOG_LEVEL}" onMatch="ACCEPT" onMismatch="DENY"/>
        </console>
    </appenders>

    <loggers>
        <root level="${LOG_LEVEL}">
            <appender-ref ref="Console"/>
        </root>
    </loggers>

</configuration>
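
To sanity-check the SLF4J -> Log4j2 wiring, a minimal class like the one below (the class name and log message are illustrative, not part of the original demo) can log through slf4j-api; the line should show up on the console formatted with the LOG_PATTERN defined above.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingCheck {
    private static final Logger LOG = LoggerFactory.getLogger(LoggingCheck.class);

    public static void main(String[] args) {
        // Routed through the log4j-slf4j-impl binding to the Console appender
        // configured in log4j2.xml above.
        LOG.info("SLF4J/Log4j2 wiring works, Web UI port = {}", 8081);
    }
}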
