// Utility class for writing a Flink data stream into ClickHouse.
package com.gmall.realtime.util;
import com.gmall.realtime.bean.VisitorStats;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.sql.PreparedStatement;
import java.sql.SQLException;
/**
 * Utility for building Flink {@link SinkFunction}s that write POJO streams into ClickHouse.
 *
 * <p>The insert statement is generated reflectively from the element class's declared
 * fields; fields marked {@code transient} are excluded both from the column list and
 * from the bound parameters, so column order and parameter order always agree.
 */
public class MySinkUtil {

    // ClickHouse JDBC endpoint prefix and driver class.
    // NOTE(review): host/port are hard-coded; consider externalizing to configuration.
    private static final String URL_PREFIX = "jdbc:clickhouse://hadoop162:8123/";
    private static final String DRIVER = "ru.yandex.clickhouse.ClickHouseDriver";

    // Utility class: no instances.
    private MySinkUtil() {
    }

    /** Smoke test: builds (but does not execute) a sink for the VisitorStats bean. */
    public static void main(String[] args) {
        getClickHouseSink("abc", "a", VisitorStats.class);
    }

    /**
     * Builds a ClickHouse sink for elements of type {@code T}.
     *
     * @param db     target database name (appended to the JDBC url)
     * @param table  target table name
     * @param tClass element class; its non-transient declared fields define the columns
     * @param <T>    element type of the stream
     * @return a JDBC-based sink writing each element as one row
     * @throws IllegalArgumentException if the class has no non-transient fields
     */
    public static <T> SinkFunction<T> getClickHouseSink(String db,
                                                        String table,
                                                        Class<T> tClass) {
        // FIX: return type was hard-coded to SinkFunction<VisitorStats>, making the
        // type parameter useless; now properly generic in T (backward-compatible).
        return getJDBCSink(URL_PREFIX + db, DRIVER, buildInsertSql(table, tClass));
    }

    /**
     * Builds {@code insert into <table>(c1,c2,... ) values ( ?,?,... )} from the
     * non-transient declared fields of {@code tClass}, in declaration order.
     */
    private static String buildInsertSql(String table, Class<?> tClass) {
        final StringBuilder cols = new StringBuilder();
        final StringBuilder marks = new StringBuilder();
        for (Field field : tClass.getDeclaredFields()) {
            // transient marks fields that must not be persisted
            if (Modifier.isTransient(field.getModifiers())) {
                continue;
            }
            if (cols.length() > 0) {
                cols.append(',');
                marks.append(',');
            }
            cols.append(field.getName());
            marks.append('?');
        }
        if (cols.length() == 0) {
            // FIX: the original emitted malformed SQL (deleteCharAt removed the "(")
            // when every field was transient; fail fast with a clear message instead.
            throw new IllegalArgumentException(
                "No persistable (non-transient) fields on " + tClass.getName());
        }
        return "insert into " + table + "(" + cols + " ) values ( " + marks + " )";
    }

    /**
     * Wraps Flink's {@link JdbcSink}: each element's non-transient declared fields are
     * bound, in declaration order, as the statement parameters — matching the column
     * order produced by {@link #buildInsertSql}.
     */
    private static <T> SinkFunction<T> getJDBCSink(String url,
                                                   String driver,
                                                   String sql) {
        return JdbcSink.<T>sink(
            sql,
            new JdbcStatementBuilder<T>() {
                @Override
                public void accept(PreparedStatement ps,
                                   T t) throws SQLException {
                    int position = 1;
                    for (Field field : t.getClass().getDeclaredFields()) {
                        if (Modifier.isTransient(field.getModifiers())) {
                            continue;
                        }
                        field.setAccessible(true);
                        try {
                            ps.setObject(position++, field.get(t));
                        } catch (IllegalAccessException e) {
                            // FIX: the original swallowed this with printStackTrace(),
                            // silently executing statements with unbound parameters;
                            // rethrow so Flink's JDBC sink fails (and can retry/restart).
                            throw new SQLException(
                                "Cannot read field " + field.getName()
                                    + " of " + t.getClass().getName(), e);
                        }
                    }
                }
            },
            new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                .withUrl(url)
                .withDriverName(driver)
                .build());
    }
}