依赖
<!--flink核心包-->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>1.7.2</version>
</dependency>
<!--flink流处理包-->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.12</artifactId>
<version>1.7.2</version>
<!--<scope>provided</scope>-->
</dependency>
<!-- NOTE: Scala suffix _2.11 does not match flink-streaming-java_2.12 above;
     mixing Scala 2.11 and 2.12 artifacts on one classpath causes conflicts —
     align both dependencies on the same Scala version. -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-redis_2.11</artifactId>
<version>1.1.5</version>
</dependency>
代码
import java.util.HashMap;
import java.util.Map;

import org.apache.flink.streaming.api.functions.source.SourceFunction;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import redis.clients.jedis.Jedis;
import redis.clients.jedis.exceptions.JedisConnectionException;
/**
* redis中进行数据初始化
*
* hset user1 id 1
* hset user1 name jack
* hset user1 age 24
* hset user1 address SH
*
* 需要把属性名和属性值的对应关系组装成java的hashmap
*
* SourceFunction中泛型为HashMap<String, String>数据类型即为从redis处理后给flink的数据类型
*/
public class MyRedisSource implements SourceFunction<HashMap<String, String>> {
private Logger logger = LoggerFactory.getLogger(MyRedisSource.class);
private final long SLEEP_MILLION = 60000;
private boolean isRunning = true;
private Jedis jedis = null;
// SourceContext中泛型为HashMap<String, String>数据类型即为从redis处理后给flink的数据类型
public void run(SourceContext<HashMap<String, String>> ctx) throws Exception {
this.jedis = new Jedis("192.168.25.129", 6379);
//存储所有国家和大区的对应关系
HashMap<String, String> keyValueMap = new HashMap<String, String>();
while (isRunning) {
try {
keyValueMap.clear();
for (int i = 1; i <= 100; i++) {
String key1 = "user" + i;
Map<String, String> map = jedis.hgetAll(key1);
for (Map.Entry<String, String> entry : map.entrySet()) {
String key2 = entry.getKey();
String value = entry.getValue();
keyValueMap.put(key1 + "." + key2, value);
}
}
if (keyValueMap.size() > 0) {
ctx.collect(keyValueMap);
} else {
logger.warn("从redis中获取的数据为空!!!");
}
Thread.sleep(SLEEP_MILLION);
} catch (JedisConnectionException e) {
logger.error("redis链接异常,重新获取链接", e.getCause());
jedis = new Jedis("192.168.25.129", 6379);
} catch (Exception e) {
logger.error("source 数据源异常", e.getCause());
}
}
}
public void cancel() {
isRunning = false;
if (jedis != null) {
jedis.close();
}
}
}
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
public class FlinkSourceFromRedis {

    /**
     * Entry point: wires the custom Redis source into a streaming job and
     * prints every emitted snapshot map to stdout.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Each stream element is one full "userN.field" -> value snapshot.
        DataStream<HashMap<String, String>> redisStream = env.addSource(new MyRedisSource());
        redisStream.print();

        env.execute();
    }
}