vim conf/flink-conf.yaml
Modify the following entries:
# JobManager node address.
jobmanager.rpc.address: localhost  # or an IP address
jobmanager.bind-host: 0.0.0.0
rest.address: localhost  # or an IP address
rest.bind-address: 0.0.0.0
# TaskManager node address; set it to the current machine.
taskmanager.bind-host: 0.0.0.0
taskmanager.host: localhost  # or an IP address
Since this is a single-machine setup with only one server, conf/workers contains just localhost and conf/masters contains localhost:8081.
3. Preparation
Place the built jar (a fat jar) in Flink's lib directory; Flink scans the lib directory automatically at startup. Watch out for log4j dependency conflicts: any dependency that should be excluded must actually be excluded when the jar is packaged.
In standalone application mode no cluster is created in advance, so the start-cluster.sh script is not used. Instead, the standalone-job.sh script (also in the bin directory) is used to create a JobManager.
4. Start the JobManager. (No jar file needs to be specified; the startup script automatically scans all jars in the lib directory. Only the job's fully qualified entry class is specified here.)
./bin/standalone-job.sh start --job-classname zhilong.com.dw.dwd.FlinkSinkClickhouse
./bin/standalone-job.sh start --job-classname zhilong.com.dw.dwd.KafkaToClickhouse
5. Start the TaskManager.
./bin/taskmanager.sh start
6. To stop the cluster, scripts can be used as well; the commands are below. (The stop scripts were not fully verified here.)
./bin/standalone-job.sh stop
./bin/taskmanager.sh stop
./bin/jobmanager.sh stop
./bin/taskmanager.sh stop
7. Create the table in ClickHouse according to the fields of the Kafka messages that the code parses (the sink writes to the testmaxwell1 database, so create the table there):
CREATE TABLE testmaxwell1.ods_countlyV2 (
    appKey String,
    appVersion String,
    deviceId String,
    phone_no String
) ENGINE = MergeTree()
ORDER BY (appKey, appVersion, deviceId, phone_no);
8. Start a Kafka producer and send a test message, then check the corresponding ClickHouse table (see the producer sketch below).
9. Verify that the data has arrived in the ClickHouse table.
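For steps 8 and 9, a minimal producer sketch is shown below. It assumes the kafka-clients dependency is on the classpath; the broker address is a placeholder (it is masked in the original), and the topic name and JSON fields mirror what the Flink job in the next section expects, with illustrative values.

```java
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.util.Properties;

public class TestMessageProducer {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "172.xx.xxx.x:9092"); // placeholder broker address
        props.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // One JSON record containing exactly the fields the Flink job parses.
        String message = "{\"appKey\":\"testKey\",\"appVersion\":\"1.0.0\","
                + "\"deviceId\":\"device-001\",\"phone_no\":\"13800000000\"}";

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            producer.send(new ProducerRecord<>("topic_test2", message)).get(); // block until acknowledged
        }
    }
}
```

After the message is sent, the row should appear when querying the table, for example with SELECT * FROM testmaxwell1.ods_countlyV2 in clickhouse-client.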
# Code
1. Main program class
package com.kszx;
import com.alibaba.fastjson.JSON;
import com.kszx.Mail;
import com.kszx.MyClickHouseUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import java.util.HashMap;
import java.util.Properties;
public class FlinkSinkClickhouse {
public static void main(String[] args) throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.enableCheckpointing(5000);
env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
// source
String topic = "topic_test2";
Properties props = new Properties();
// Kafka cluster connection parameters
props.setProperty("bootstrap.servers", "172.xx.xxx.x:9092,172.xx.xxx.x:9092,172.xx.xxx.x:9092");
// Define the Flink Kafka consumer
FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<String>(topic, new SimpleStringSchema(), props);
consumer.setStartFromGroupOffsets();
consumer.setStartFromEarliest(); // always consume from the beginning (overrides the group-offsets setting above)
// add the source data stream
DataStreamSource<String> source = env.addSource(consumer);
source.print("111");
System.out.println(source);
SingleOutputStreamOperator<Mail> dataStream = source.map(new MapFunction<String, Mail>() {
@Override
public Mail map(String value) throws Exception {
HashMap<String, String> hashMap = JSON.parseObject(value, HashMap.class);
// System.out.println(hashMap);
String appKey = hashMap.get("appKey");
String appVersion = hashMap.get("appVersion");
String deviceId = hashMap.get("deviceId");
String phone_no = hashMap.get("phone_no");
Mail mail = new Mail(appKey, appVersion, deviceId, phone_no);
// System.out.println(mail);
return mail;
}
});
dataStream.print();
// sink
String sql = "INSERT INTO testmaxwell1.ods_countlyV2 (appKey, appVersion, deviceId, phone_no) " +
"VALUES (?, ?, ?, ?)";
MyClickHouseUtil ckSink = new MyClickHouseUtil(sql);
dataStream.addSink(ckSink);
env.execute();
}
}
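One note on the consumer configuration above: only bootstrap.servers is set, while the Flink Kafka consumer documentation also lists group.id among the required properties (it is used when committing offsets on checkpoints). Also, setStartFromEarliest() overrides the setStartFromGroupOffsets() call right before it, so the topic is re-read from the beginning on every start. A minimal addition, with a hypothetical group name:

```java
// Hypothetical consumer group name; set it next to bootstrap.servers above.
props.setProperty("group.id", "flink-clickhouse-sink");
```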
2. Utility class that writes to ClickHouse
package com.kszx;
import com.kszx.Mail;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import ru.yandex.clickhouse.ClickHouseConnection;
import ru.yandex.clickhouse.ClickHouseDataSource;
import ru.yandex.clickhouse.settings.ClickHouseProperties;
import ru.yandex.clickhouse.settings.ClickHouseQueryParam;
import java.sql.PreparedStatement;
import java.util.HashMap;
import java.util.Map;
public class MyClickHouseUtil extends RichSinkFunction<Mail> {
private ClickHouseConnection conn = null;
String sql;
public MyClickHouseUtil(String sql) {
this.sql = sql;
}
@Override
public void open(Configuration parameters) throws Exception {
super.open(parameters);
return ;
}
@Override
public void close() throws Exception {
super.close();
if (conn != null)
{
conn.close();
}
}
@Override
public void invoke(Mail mail, Context context) throws Exception {
String url = "jdbc:clickhouse://172.xx.xxx.xxx:8123/testmaxwell1";
ClickHouseProperties properties = new ClickHouseProperties();
properties.setUser("default");
properties.setPassword("xxxxxx");
properties.setSessionId("default-session-id2");
ClickHouseDataSource dataSource = new ClickHouseDataSource(url, properties);
Map<ClickHouseQueryParam, String> additionalDBParams = new HashMap<>();
additionalDBParams.put(ClickHouseQueryParam.SESSION_ID, "new-session-id2");
try {
conn = dataSource.getConnection();
PreparedStatement preparedStatement = conn.prepareStatement(sql);
preparedStatement.setString(1,mail.getAppKey());
preparedStatement.setString(2, mail.getAppVersion());
preparedStatement.setString(3, mail.getDeviceId());
preparedStatement.setString(4, mail.getPhone_no());
preparedStatement.execute();
}
catch (Exception e){
e.printStackTrace();
}
}
}
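As written, invoke() builds a new ClickHouseDataSource and opens a new connection for every record, and the PreparedStatement is never closed. A common alternative is to open the connection once per subtask in open() and reuse it. Below is a minimal sketch of that approach, using the same ru.yandex.clickhouse driver and the URL/credentials from the class above (host and password masked as in the original):

```java
@Override
public void open(Configuration parameters) throws Exception {
    super.open(parameters);
    // Build the data source and connection once per subtask instead of once per record.
    ClickHouseProperties properties = new ClickHouseProperties();
    properties.setUser("default");
    properties.setPassword("xxxxxx"); // masked, as in the original
    ClickHouseDataSource dataSource =
            new ClickHouseDataSource("jdbc:clickhouse://172.xx.xxx.xxx:8123/testmaxwell1", properties);
    conn = dataSource.getConnection();
}

@Override
public void invoke(Mail mail, Context context) throws Exception {
    // Reuse the connection; close the statement after each record.
    try (PreparedStatement preparedStatement = conn.prepareStatement(sql)) {
        preparedStatement.setString(1, mail.getAppKey());
        preparedStatement.setString(2, mail.getAppVersion());
        preparedStatement.setString(3, mail.getDeviceId());
        preparedStatement.setString(4, mail.getPhone_no());
        preparedStatement.execute();
    }
}
```

For higher throughput, records are often buffered and written with addBatch()/executeBatch(), but a single-row insert keeps the sketch close to the original class.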
3. POJO class for the table fields
package com.kszx;
//package com.demo.flink.pojo;
public class Mail {
private String appKey;
private String appVersion;
private String deviceId;
private String phone_no;
public Mail(String appKey, String appVersion, String deviceId, String phone_no) {
this.appKey = appKey;
this.appVersion = appVersion;
this.deviceId = deviceId;
this.phone_no = phone_no;
}
public String getAppKey() {
return appKey;
}
public void setAppKey(String appKey) {
this.appKey = appKey;
}
public String getAppVersion() {
return appVersion;
}
public void setAppVersion(String appVersion) {
this.appVersion = appVersion;
}
public String getDeviceId() {
return deviceId;
}
public void setDeviceId(String deviceId) {
this.deviceId = deviceId;
}
public String getPhone_no() {
return phone_no;
}
public void setPhone_no(String phone_no) {
this.phone_no = phone_no;
}
}