Flink:将Redis中的数据作为Flink的数据源

27 篇文章 0 订阅
22 篇文章 0 订阅

依赖

<!--flink核心包-->
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-java</artifactId>
    <version>1.7.2</version>
</dependency>
<!--flink流处理包-->
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-streaming-java_2.12</artifactId>
    <version>1.7.2</version>
    <!--<scope>provided</scope>-->
</dependency>

<!-- NOTE: the Scala suffix _2.11 here conflicts with flink-streaming-java_2.12 above;
     align both artifacts on the same Scala version to avoid dependency clashes. -->
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-connector-redis_2.11</artifactId>
    <version>1.1.5</version>
</dependency>

代码

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.exceptions.JedisConnectionException;

/**
 * redis中进行数据初始化
 * 
 * hset user1 id 1
 * hset user1 name jack
 * hset user1 age 24
 * hset user1 address SH
 * 
 * 需要把属性名和属性值的对应关系组装成java的hashmap
 *
 *  SourceFunction中泛型为HashMap<String, String>数据类型即为从redis处理后给flink的数据类型
 */
public class MyRedisSource implements SourceFunction<HashMap<String, String>> {
    private Logger logger = LoggerFactory.getLogger(MyRedisSource.class);

    private final long SLEEP_MILLION = 60000;

    private boolean isRunning = true;
    private Jedis jedis = null;

    // SourceContext中泛型为HashMap<String, String>数据类型即为从redis处理后给flink的数据类型
    public void run(SourceContext<HashMap<String, String>> ctx) throws Exception {

        this.jedis = new Jedis("192.168.25.129", 6379);
        //存储所有国家和大区的对应关系
        HashMap<String, String> keyValueMap = new HashMap<String, String>();
        while (isRunning) {
            try {
                keyValueMap.clear();
                for (int i = 1; i <= 100; i++) {
                    String key1 = "user" + i;
                    Map<String, String> map = jedis.hgetAll(key1);
                    for (Map.Entry<String, String> entry : map.entrySet()) {
                        String key2 = entry.getKey();
                        String value = entry.getValue();
                        keyValueMap.put(key1 + "." + key2, value);
                    }
                }
                if (keyValueMap.size() > 0) {
                    ctx.collect(keyValueMap);
                } else {
                    logger.warn("从redis中获取的数据为空!!!");
                }
                Thread.sleep(SLEEP_MILLION);
            } catch (JedisConnectionException e) {
                logger.error("redis链接异常,重新获取链接", e.getCause());
                jedis = new Jedis("192.168.25.129", 6379);
            } catch (Exception e) {
                logger.error("source 数据源异常", e.getCause());
            }
        }
    }

    public void cancel() {
        isRunning = false;
        if (jedis != null) {
            jedis.close();
        }
    }
}
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class FlinkSourceFromRedis {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<HashMap<String, String>> data = env.addSource(new MyRedisSource());

        data.print();
        env.execute();
    }
}
  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

程序员无羡

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值