Flink: Writing Data to ClickHouse

1. Dependencies

The project targets Flink 1.11.1. Only flink-connector-jdbc and the ClickHouse JDBC driver (clickhouse-jdbc) are strictly required for this example; the Kafka, Redis, Elasticsearch and MySQL entries below are not needed here.

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>flink-proj</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-java</artifactId>
            <version>1.11.1</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-csv</artifactId>
            <version>1.11.1</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java_2.11</artifactId>
            <version>1.11.1</version>
        </dependency>

<!--        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table_2.11</artifactId>
            <version>1.11.1</version>
        </dependency>-->

        <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-table-api-java -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java</artifactId>
            <version>1.11.1</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_2.11</artifactId>
            <version>1.11.1</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner_2.11</artifactId>
            <version>1.11.1</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge_2.11</artifactId>
            <version>1.11.1</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-json</artifactId>
            <version>1.11.1</version>
        </dependency>


        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_2.11</artifactId>
            <version>1.11.1</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka_2.11</artifactId>
            <version>1.11.1</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.bahir/flink-connector-redis -->
        <!-- keep the Scala binary suffix (_2.11) consistent across all Flink/Bahir artifacts -->
        <dependency>
            <groupId>org.apache.bahir</groupId>
            <artifactId>flink-connector-redis_2.11</artifactId>
            <version>1.1.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-elasticsearch7_2.11</artifactId>
            <version>1.11.1</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core -->
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-core</artifactId>
            <version>2.22.1</version>
        </dependency>

        <!-- SLF4J binding for Log4j 2; slf4j-log4j12 binds to the old Log4j 1.x and would not match log4j-core 2.x -->
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j-impl</artifactId>
            <version>2.22.1</version>
        </dependency>

        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.30</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-jdbc_2.11</artifactId>
            <version>1.11.1</version>
        </dependency>

        <dependency>
            <groupId>ru.yandex.clickhouse</groupId>
            <artifactId>clickhouse-jdbc</artifactId>
            <version>0.3.2</version>
        </dependency>

    </dependencies>

</project>

2. Create the table in ClickHouse

Unlike MySQL, ClickHouse requires a table engine in the DDL (a bare create table with a column-level primary key fails by default), so use a MergeTree table ordered by username:

create table userinfo
(
    username String,
    passwd   String
)
engine = MergeTree()
order by username;
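Before wiring up the Flink job, it can help to confirm connectivity and the table with a plain JDBC round trip. A minimal sketch, reusing the same placeholder host, port 8123 and default credentials as the job below; the class name ClickHouseSmokeTest is just for illustration:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class ClickHouseSmokeTest {
    public static void main(String[] args) throws Exception {
        // Register the ClickHouse JDBC driver (ru.yandex.clickhouse:clickhouse-jdbc)
        Class.forName("ru.yandex.clickhouse.ClickHouseDriver");
        // xx.xx.xx.xx is a placeholder host, as in the Flink job below
        String url = "jdbc:clickhouse://xx.xx.xx.xx:8123/default";
        try (Connection conn = DriverManager.getConnection(url, "default", "yourpassword");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("select count(*) from userinfo")) {
            if (rs.next()) {
                System.out.println("userinfo rows: " + rs.getLong(1));
            }
        }
    }
}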

3. Write the contents of a file to ClickHouse through a Sink

package cn.edu.tju.demo;

import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.sql.PreparedStatement;
import java.sql.SQLException;

public class Test16B {
    // Placeholder for the ClickHouse host
    private static String CLICKHOUSE_SERVER = "xx.xx.xx.xx";

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment environment = StreamExecutionEnvironment
                .getExecutionEnvironment();

        // Read demo.txt line by line; each line becomes one record
        DataStream<String> mySource = environment.readTextFile("demo.txt");

        String sql = "insert into userinfo(username, passwd) values(?, ?)";

        // Connection options for the ClickHouse JDBC driver (HTTP interface, port 8123)
        JdbcConnectionOptions connectionOptions = new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                .withDriverName("ru.yandex.clickhouse.ClickHouseDriver")
                .withUrl("jdbc:clickhouse://" + CLICKHOUSE_SERVER + ":8123/default")
                .withUsername("default")
                .withPassword("yourpassword")
                .build();

        // For this demo, each line of the file is written into both columns
        mySource.addSink(JdbcSink.sink(sql, new JdbcStatementBuilder<String>() {
            @Override
            public void accept(PreparedStatement ps, String s) throws SQLException {
                ps.setString(1, s);
                ps.setString(2, s);
            }
        }, connectionOptions));

        environment.execute("my job");
    }
}
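If demo.txt instead holds comma-separated username,passwd pairs (e.g. a line like alice,secret becoming one row), a variant like the following parses each line and makes the write batching explicit through JdbcExecutionOptions. This is a sketch under that file-format assumption; the class name Test16C and the batch numbers are illustrative:

import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class Test16C {
    private static String CLICKHOUSE_SERVER = "xx.xx.xx.xx";

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment environment = StreamExecutionEnvironment
                .getExecutionEnvironment();

        // Assumed file format: one "username,passwd" pair per line
        DataStream<String> mySource = environment.readTextFile("demo.txt");

        String sql = "insert into userinfo(username, passwd) values(?, ?)";

        mySource.addSink(JdbcSink.sink(
                sql,
                (ps, line) -> {
                    // Split each line into the two columns
                    String[] parts = line.split(",", 2);
                    ps.setString(1, parts[0]);
                    ps.setString(2, parts.length > 1 ? parts[1] : "");
                },
                JdbcExecutionOptions.builder()
                        .withBatchSize(1000)       // flush every 1000 rows...
                        .withBatchIntervalMs(200)  // ...or every 200 ms, whichever comes first
                        .withMaxRetries(3)
                        .build(),
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withDriverName("ru.yandex.clickhouse.ClickHouseDriver")
                        .withUrl("jdbc:clickhouse://" + CLICKHOUSE_SERVER + ":8123/default")
                        .withUsername("default")
                        .withPassword("yourpassword")
                        .build()));

        environment.execute("my job with batching");
    }
}

ClickHouse favours fewer, larger inserts, so batching by row count or flush interval rather than writing row by row is the usual choice here.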






