闲话不多说,直接进入正题。
先看 RedisCommand 设置数据结构类型时与 Redis 数据结构、命令的对应关系:
Data Type | Redis Command [Sink] |
---|---|
HASH | HSET |
LIST | RPUSH, LPUSH |
SET | SADD |
PUBSUB | PUBLISH |
STRING | SET |
HYPER_LOG_LOG | PFADD |
SORTED_SET | ZADD |
SORTED_SET | ZREM |
具体的操作类:
package sink;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
public class StreamingDemoToRedis {
public static void main(String[] args) throws Exception{
String hostname="hadoop01";
String delimiter="\n";
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStreamSource<String> text = env.socketTextStream(hostname, 9000, delimiter);
DataStream<Tuple2<String, String>> list_wordData = text.map(new MapFunction<String, Tuple2<String, String>>() {
public Tuple2<String, String> map(String value) throws Exception {
return new Tuple2<>("list_word", value);
}
});
//创建redis的配置
FlinkJedisPoolConfig conf = new FlinkJedisPoolConfig.Builder().setHost(hostname).setPort(6379).build();
//
RedisSink<Tuple2<String, String>> redisSink = new RedisSink<Tuple2<String, String>>(conf, new MyRedisMapper());
list_wordData.addSink(redisSink);
env.execute("sink redis");
}
public static class MyRedisMapper implements RedisMapper<Tuple2<String, String>>{
//操作redis时选择的数据类型
public RedisCommandDescription getCommandDescription() {
return new RedisCommandDescription(RedisCommand.LPUSH);
}
//表示接受的数据中获取需要操作的redis的key值
public String getKeyFromData(Tuple2<String, String> data) {
return data.f0;
}
//表示接受的数据中获取需要操作的redis的value值
public String getValueFromData(Tuple2<String, String> data) {
return data.f1;
}
}
/**
* 结果为:
* 192.168.189.128:6379> flushall
* OK
* 192.168.189.128:6379> lrange list_word 0 -1
* 1) "ss"
* 2) "rr"
* 3) "ww"
* 192.168.189.128:6379>
*/
}
下面是对应的 Scala 代码实现。
具体实现类:
package streaming
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.redis.RedisSink
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig
import org.apache.flink.streaming.connectors.redis.common.mapper.{RedisCommand, RedisCommandDescription, RedisMapper}
object StreamingDemoSinkRedis {
  /**
   * Demo job: reads newline-delimited text from a socket and LPUSHes each
   * received line into the Redis list keyed "l_word_scala".
   */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    import org.apache.flink.api.scala._

    // Socket source: one record per '\n'-terminated line.
    val lines = env.socketTextStream("hadoop01", 9000, '\n')

    // Pair every incoming line with the fixed Redis key.
    val keyedLines = lines.map(line => ("l_word_scala", line))

    // Jedis connection-pool configuration for the target Redis instance.
    val jedisConfig = new FlinkJedisPoolConfig.Builder()
      .setHost("hadoop01")
      .setPort(6379)
      .build()

    // Sink that applies MyRedisMapper to each tuple before writing.
    keyedLines.addSink(new RedisSink[(String, String)](jedisConfig, new MyRedisMapper))

    env.execute("StreamingDemoSinkRedis")
  }

  /** Maps each (key, value) tuple onto the Redis LPUSH command. */
  class MyRedisMapper extends RedisMapper[(String, String)] {
    // Redis data type / command the sink executes for every record.
    override def getCommandDescription: RedisCommandDescription =
      new RedisCommandDescription(RedisCommand.LPUSH)

    // Redis key: first tuple element.
    override def getKeyFromData(data: (String, String)): String = data._1

    // Redis value: second tuple element.
    override def getValueFromData(data: (String, String)): String = data._2
  }
}