Writing Kafka Data to Redis with Flink

Dependencies
<dependency>
  <groupId>org.apache.bahir</groupId>
  <artifactId>flink-connector-redis_2.11</artifactId>
  <version>1.0</version>
</dependency>
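
The dependency above only covers the Redis sink. Since the job also reads from Kafka, the Flink Kafka connector must be on the classpath as well. As a hedged sketch (the universal connector artifact for Scala 2.11; pin the version to whatever Flink release you are running), the extra dependency would look roughly like:

<dependency>
  <groupId>org.apache.flink</groupId>
  <artifactId>flink-connector-kafka_2.11</artifactId>
  <version>${flink.version}</version>
</dependency>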

Code

import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.streaming.connectors.redis.RedisSink
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig
import org.apache.kafka.clients.consumer.ConsumerConfig

//1. Create the StreamExecutionEnvironment
val env = StreamExecutionEnvironment.getExecutionEnvironment

//2. Configure the Kafka consumer (source)
val props = new Properties()
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "CentOS:9092")
props.put(ConsumerConfig.GROUP_ID_CONFIG, "g1")
val kafkaConsumer = new FlinkKafkaConsumer[String]("topic01", new SimpleStringSchema(), props)

//3. Configure the Redis sink
val redisConfig = new FlinkJedisPoolConfig.Builder()
  .setHost("CentOS")
  .setPort(6379)
  .build()
val redisSink = new RedisSink[(String, Int)](redisConfig, new WordPairRedisMapper)

//4. Build the word-count pipeline: Kafka source -> flatMap/map/keyBy/sum -> Redis sink
val lines: DataStream[String] = env.addSource(kafkaConsumer)
lines.flatMap(_.split("\\s+"))
  .map((_, 1))
  .keyBy(0)
  .sum(1)
  .addSink(redisSink)

//5. Execute the job
env.execute("wordcount")
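
The WordPairRedisMapper referenced by the sink lives in its own source file; it maps every (word, count) pair onto an HSET against the Redis hash "clicks":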
package com.baizhi.demo03

import org.apache.flink.streaming.connectors.redis.common.mapper.{RedisCommand, RedisCommandDescription, RedisMapper}

//Translates each (word, count) pair into: HSET clicks <word> <count>
class WordPairRedisMapper extends RedisMapper[(String, Int)] {
  //Use HSET with the hash "clicks" as the additional key for every record
  override def getCommandDescription: RedisCommandDescription = {
    new RedisCommandDescription(RedisCommand.HSET, "clicks")
  }

  //The word becomes the field of the hash
  override def getKeyFromData(t: (String, Int)): String = {
    t._1
  }

  //The running count becomes the field's value
  override def getValueFromData(t: (String, Int)): String = {
    t._2.toString
  }
}
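
To check the results, one option is to read the "clicks" hash back directly. The sketch below is a minimal, hypothetical helper (the object name CheckClicks is made up here); it assumes the Jedis client pulled in by the Bahir connector is on the classpath and that the job has already processed some records from topic01:

import redis.clients.jedis.Jedis
import scala.collection.JavaConverters._

object CheckClicks {
  def main(args: Array[String]): Unit = {
    //Connect to the same Redis instance the sink writes to
    val jedis = new Jedis("CentOS", 6379)
    //HGETALL clicks: every word and its latest count
    jedis.hgetAll("clicks").asScala.foreach { case (word, count) =>
      println(s"$word -> $count")
    }
    jedis.close()
  }
}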
