Flink 把 Kafka 数据写入 Redis 的简单实现
1. pom 文件（Maven 依赖）
<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<scala.version>2.11.11</scala.version>
<spark.version>2.2.1</spark.version>
<hadoop.version>2.7.2</hadoop.version>
<kafka.version>0.10.2.1</kafka.version>
<fastjson.version>1.2.51</fastjson.version>
<akka.version>2.4.17</akka.version>
<scala.compat.version>2.11</scala.compat.version>
<encoding>UTF-8</encoding>
<flink.version>1.11.3</flink.version>
</properties>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<version>${kafka.version}</version>
</dependency>
<dependency>
<groupId>net.sf.json-lib</groupId>
<artifactId>json-lib</artifactId>
<version>2.4</version>
<classifier>jdk15</classifier>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.51</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-scala_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-java</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.11_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-blink_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.11</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-common</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.bahir</groupId>
<artifactId>flink-connector-redis_2.11</artifactId>
<version>1.0</version>
</dependency>
2. 自定义 Redis Sink（实现 RedisMapper）
package com.test
import org.apache.flink.streaming.connectors.redis.common.mapper.{RedisCommand, RedisCommandDescription, RedisMapper}
class UserDefineRedisMapper extends RedisMapper[(String,Int)] {
override def getCommandDescription: RedisCommandDescription = {
new RedisCommandDescription(RedisCommand.HSET,"crm_supper_case_details")
}
override def getKeyFromData(t: (String, Int)): String = {
t._1
}
override def getValueFromData(t: (String, Int)): String = {
t._2.toString
}
}
3. Flink 读取 Kafka 数据并写入 Redis
package com.test
import java.util.Properties
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, _}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.streaming.connectors.redis.RedisSink
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig
import org.apache.kafka.clients.consumer.ConsumerConfig
object DataSink_redis {
def main(args: Array[String]): Unit = {
val fsEnv = StreamExecutionEnvironment.getExecutionEnvironment
val conf = new FlinkJedisPoolConfig.Builder()
.setHost("mastercs")
.setPort(6379).build()
val props = new Properties()
props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,"mastercs:9092,slave1cs:9092,slave2cs:9092")
props.setProperty(ConsumerConfig.GROUP_ID_CONFIG,"g1")
val flinkkafkaConsumer = new FlinkKafkaConsumer[String]("crm_supper_case_details",new SimpleStringSchema(),props)
val ds = fsEnv.addSource(flinkkafkaConsumer)
ds.setParallelism(10)
ds.map((_,1))
.keyBy(0)
.sum(1)
.addSink(new RedisSink(conf,new UserDefineRedisMapper))
fsEnv.execute("FlinkWordCountsKafkaSource01")
}
}