Flink接入Redis
创建Redis连接池 RedisUtils
package com.pls.common;
import java.util.LinkedList;
import java.util.List;
import redis.clients.jedis.JedisPoolConfig;
import redis.clients.jedis.JedisShardInfo;
import redis.clients.jedis.ShardedJedisPool;
public class RedisUtils {
    // Shared pool for the whole JVM; created lazily on first use.
    // volatile + double-checked locking so concurrent callers never
    // each build (and leak) their own pool — the original rebuilt the
    // pool on every invocation despite the static field.
    private static volatile ShardedJedisPool pool;

    /**
     * Returns the shared sharded-Jedis pool, creating it on first call.
     *
     * @return the shared pool, or {@code null} if pool creation failed
     *         (the failure is logged via {@link SysLog})
     */
    public ShardedJedisPool GetRedisSource() {
        if (pool == null) {
            synchronized (RedisUtils.class) {
                if (pool == null) {
                    try {
                        JedisPoolConfig config = new JedisPoolConfig();
                        config.setMaxTotal(1000);      // max active connections
                        config.setMaxIdle(50);         // max connections kept idle
                        config.setMaxWaitMillis(3000); // max wait for a free connection
                        config.setTestOnBorrow(true);  // validate on borrow
                        config.setTestOnReturn(true);  // validate on return
                        // Shard list: a single node here; add more JedisShardInfo
                        // entries to spread keys across a cluster.
                        JedisShardInfo shard = new JedisShardInfo("127.0.0.1", 6379);
                        shard.setPassword("123456");
                        List<JedisShardInfo> shards = new LinkedList<JedisShardInfo>();
                        shards.add(shard);
                        pool = new ShardedJedisPool(config, shards);
                    } catch (Exception e) {
                        String content = e.getClass().getName() + " Redis连接池出错: " + JsonUtils.objectToJson(e);
                        SysLog.error(content);
                    }
                }
            }
        }
        return pool;
    }
}
写入数据到Redis,可根据自己的需求使用Redis存储类型,我这里写的是list集合 RedisSinkAdplay
package com.pls.sink;
import com.pls.common.JsonUtils;
import com.pls.common.RedisUtils;
import com.pls.models.AdvertisementPlayModel;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import redis.clients.jedis.ShardedJedis;
import redis.clients.jedis.ShardedJedisPool;
import java.util.UUID;
/**
 * Flink sink that tags each {@link AdvertisementPlayModel} with a fresh UUID
 * and pushes its JSON form onto the Redis list {@code list_adplaysinkredis}.
 * Parallelism should be kept low (one connection per subtask).
 */
public class RedisSinkAdplay extends RichSinkFunction<AdvertisementPlayModel> {
    // Per-subtask Redis connection, borrowed from the shared pool.
    ShardedJedis jedis;

    /** Borrows the Redis connection once when the subtask starts. */
    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        ShardedJedisPool pool = new RedisUtils().GetRedisSource();
        if (pool != null) {
            jedis = pool.getResource();
        }
    }

    /**
     * Assigns a random UUID to the record and LPUSHes it as JSON.
     * Falls back to lazily acquiring the connection in case open()
     * could not obtain one (pool creation failed at startup).
     */
    @Override
    public void invoke(AdvertisementPlayModel value, Context context) {
        if (jedis == null) {
            ShardedJedisPool pool = new RedisUtils().GetRedisSource();
            jedis = pool.getResource();
        }
        value.uuid = String.valueOf(UUID.randomUUID());
        jedis.lpush("list_adplaysinkredis", JsonUtils.objectToJson(value));
    }

    /**
     * Returns the connection to the pool. Guarded against null so a
     * subtask that failed to connect does not NPE during shutdown
     * (the original called jedis.close() unconditionally).
     */
    @Override
    public void close() throws Exception {
        super.close();
        if (jedis != null) {
            jedis.close();
        }
    }
}
读取Redis缓存数据,写入到数据库 EventSinkCK
package com.pls.sink;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.druid.util.JdbcUtils;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.pls.common.DateUtils;
import com.pls.common.DbConfig;
import com.pls.common.JsonUtils;
import com.pls.common.RedisUtils;
import com.pls.common.SysLog;
import com.pls.models.EventMonitoringModel;
import com.pls.models.UserDurationModel;
import redis.clients.jedis.ShardedJedis;
import redis.clients.jedis.ShardedJedisPool;
import java.sql.*;
import java.util.Date;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
/**
 * Background drain thread: reads buffered event JSON from the Redis list
 * {@code list_eventsinkredis} in chunks of up to 500, batch-inserts them
 * into ClickHouse via Druid, and only then trims the consumed entries off
 * the list (so a failed insert re-processes the same chunk).
 */
public class EventSinkCK extends Thread {
    private DruidDataSource dataSource;
    ShardedJedis jedis = null;

    public void run() {
        if (dataSource == null) {
            dataSource = new DbConfig().OpenDb();
        }
        adddata();
    }

    /** Endless poll loop; exits only when the thread is interrupted. */
    private void adddata() {
        while (true) {
            try {
                if (jedis == null) {
                    ShardedJedisPool pool = new RedisUtils().GetRedisSource();
                    jedis = pool.getResource();
                }
                // Peek (do not pop) at most 500 entries; they are trimmed
                // only after a successful insert.
                var list = jedis.lrange("list_eventsinkredis", 0, 499);
                if (list == null || list.isEmpty()) {
                    Thread.sleep(10000); // nothing buffered — back off
                    continue;
                }
                int num = list.size();
                Integer rescode = insert(list);
                if (rescode > 0) {
                    // Drop the rows just persisted from the head of the list.
                    jedis.ltrim("list_eventsinkredis", num, -1);
                }
                Thread.sleep(1000);
            } catch (InterruptedException ie) {
                // The original swallowed interrupts in the broad catch and
                // looped forever; restore the flag and stop the drain loop.
                Thread.currentThread().interrupt();
                return;
            } catch (Exception x) {
                String content = x.getClass().getName() + DateUtils.GetNowDate("yyyy-MM-dd HH:mm:ss") + " 读取Redis数据报错: " + x;
                SysLog.error(content);
            }
        }
    }

    /**
     * Batch-inserts the given JSON rows, grouped per target database.
     * <p>
     * Uses a parameterized {@code PreparedStatement} with {@code addBatch}.
     * This fixes two defects of the original string-built SQL: value tuples
     * for a second row in the same database were appended with no comma
     * separator (invalid SQL), and string fields from Redis were spliced
     * into the statement unescaped (quote breakage / SQL injection).
     * The table name itself cannot be a bind parameter; {@code dbname}
     * originates from our own models and is treated as trusted here.
     *
     * @param values JSON-encoded {@link EventMonitoringModel} rows
     * @return number of rows batched, or -1 on error/empty input
     */
    private Integer insert(List<String> values) throws Exception {
        if (values == null || values.size() == 0) {
            return -1;
        }
        if (dataSource == null) {
            dataSource = new DbConfig().OpenDb();
        }
        DruidPooledConnection conn = null;
        // One batched statement per target database.
        Map<String, PreparedStatement> stats = new Hashtable<String, PreparedStatement>();
        Integer num = 0;
        try {
            conn = dataSource.getConnection();
            for (String v : values) {
                EventMonitoringModel item = JSONObject.parseObject(v, EventMonitoringModel.class);
                // Deserialized absent values may arrive as the literal string
                // "null"; the original compared with == which is never true.
                if (item.versionnumber == null || "null".equals(item.versionnumber)) {
                    item.versionnumber = "";
                }
                PreparedStatement stat = stats.get(item.dbname);
                if (stat == null) {
                    String insertSql = "insert into " + item.dbname
                            + ".t_ysbl_eventfunnel(platform, channel, dateday, eventname, eventid, userid, triggercount,lasttimestamp,eventparameter,versionnumber)"
                            + " values(?,?,?,?,?,?,?,?,?,?)";
                    stat = conn.prepareStatement(insertSql);
                    stats.put(item.dbname, stat);
                }
                String log_date = DateUtils.getDatetime(String.valueOf(item.dateday), "yyyy-MM-dd");
                stat.setObject(1, item.platform);
                stat.setObject(2, item.channel);
                stat.setString(3, log_date);
                stat.setString(4, item.eventname);
                stat.setObject(5, item.eventid);
                stat.setObject(6, item.userid);
                stat.setObject(7, item.triggercount);
                stat.setObject(8, item.lasttimestamp);
                stat.setString(9, item.eventparameter);
                stat.setString(10, item.versionnumber);
                stat.addBatch();
                num++;
            }
            for (PreparedStatement stat : stats.values()) {
                stat.executeBatch();
            }
        } catch (Exception e) {
            String content = e.getClass().getName() + DateUtils.GetNowDate("yyyy-MM-dd HH:mm:ss") + " Sink报错: " + e;
            SysLog.error(content);
            num = -1;
        } finally {
            for (PreparedStatement stat : stats.values()) {
                JdbcUtils.close(stat);
            }
            JdbcUtils.close(conn);
        }
        return num;
    }
}
在启动函数里面调用Redis遍历
EventSinkCK event = new EventSinkCK();
event.start();
在处理函数里面调用写入Redis算子
windowData.addSink(new RedisSinkEvent()).setParallelism(1);
至此,flink接入Redis缓冲区完成,主要是减轻数据库写入压力,防止并发,我这里数据库是用的clickhouse
本人初涉Java,写得不好,请多多建议