-- Target database/table for the Flink JDBC sink defined below.
-- IF NOT EXISTS makes the script safe to re-run.
CREATE DATABASE IF NOT EXISTS test_db;

-- MergeTree table keyed on id; columns mirror the Scala User case class.
CREATE TABLE IF NOT EXISTS test_db.test
(
    id   Int32,
    date Date,
    age  Int8
)
ENGINE = MergeTree()
ORDER BY id;

-- Seed rows; explicit column list so the insert survives schema additions.
INSERT INTO test_db.test (id, date, age) VALUES
    (4, '2020-01-12', 19),
    (5, '2020-02-13', 3);

-- Smoke check; explicit columns and ORDER BY for deterministic output.
SELECT id, date, age
FROM test_db.test
ORDER BY id;
-- Flink 1.11: sinking a DataStream into ClickHouse via the JDBC connector (Scala job below)
package com.skin
import java.sql.PreparedStatement
import org.apache.flink.streaming.api.scala._
import org.apache.flink.connector.jdbc._
/**
 * Flink 1.11 job that reads user rows from a local CSV file and sinks them
 * into ClickHouse (test_db.test) through the JDBC connector.
 */
object SkinCH {

  /** Row shape matching the target table test_db.test (id, date, age). */
  case class User(id: Int, date: String, age: Int)

  /** Binds one User onto the prepared INSERT statement (JDBC indexes are 1-based). */
  class CHSinkBuilder extends JdbcStatementBuilder[User] {
    override def accept(t: PreparedStatement, u: User): Unit = {
      t.setInt(1, u.id)
      t.setString(2, u.date)
      t.setInt(3, u.age)
    }
  }

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // CSV layout (see user.csv): id,gender,name,age,timestamp — only
    // fields 0 (id) and 3 (age) are consumed here.
    val filePath = "src/main/resources/user.csv"
    val dataStream = env.readTextFile(filePath)
      .map(line => {
        // trim guards against stray whitespace around delimiters
        val arr = line.split(",").map(_.trim)
        // NOTE(review): the date is a hard-coded placeholder; arr(4) looks like
        // an epoch-seconds timestamp that could populate it — confirm with the
        // data owner before deriving the real date.
        User(arr(0).toInt, "2020-01-03", arr(3).toInt)
      })

    // Plain INSERT with positional placeholders, filled by CHSinkBuilder.
    val skinSql =
      """
        |INSERT INTO test(
        |id, date, age
        |) VALUES(
        |?,?,?
        |)
        |""".stripMargin

    dataStream.addSink(
      JdbcSink.sink(
        skinSql,
        new CHSinkBuilder,
        // ClickHouse performs poorly with single-row inserts (one part per
        // insert); batch writes and also flush on an interval so short files
        // still reach the table promptly. Retries cover transient HTTP errors.
        new JdbcExecutionOptions.Builder()
          .withBatchSize(1000)
          .withBatchIntervalMs(200)
          .withMaxRetries(3)
          .build(),
        new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
          .withDriverName("ru.yandex.clickhouse.ClickHouseDriver")
          .withUrl("jdbc:clickhouse://jeff200:8123/test_db")
          .withUsername("admin")
          .withPassword("123456") // TODO(review): move credentials out of source control
          .build()
      )
    )

    env.execute("SinkCH Job")
  }
}
<!-- Flink JDBC connector (Scala 2.11 build); provides JdbcSink,
     JdbcStatementBuilder, JdbcExecutionOptions, JdbcConnectionOptions. -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-jdbc_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<!-- ClickHouse JDBC driver supplying ru.yandex.clickhouse.ClickHouseDriver,
     the driver class named in the sink's connection options. -->
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>0.2.4</version>
</dependency>
1,男,张三,20,1605970941
2,女,莉莉,30,1605970922
3,女,红红,30,1605970913
4,男,李四,24,1605970904
5,男,王五,25,1605970965
6,男,小明,20,1605970946