之前尝试过使用kafka自带的topic进行offset管理的实践
但这是 Kafka 0.11 才比较完善的内容,目前很多客户还在使用 Kafka 0.10,因此又尝试了用 MySQL 管理 offset,并将统计结果写入 Redis。
直接贴代码了
PS:在这里offset没有进行初始化,待补充
package main.scala
import kafka.common.TopicAndPartition
import kafka.message.MessageAndMetadata
import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Duration, StreamingContext}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka.{HasOffsetRanges, KafkaUtils}
import scalikejdbc._
import redis.clients.jedis.{Jedis, JedisPool}
//需求:消费者自定义控制offset
//在这里offset保存到mysql
/** Demo: Spark Streaming direct-Kafka consumer with manually managed offsets.
  *
  * Offsets are loaded from and persisted back to a MySQL table (`test1`)
  * inside a transaction; the consumed records are aggregated into a Redis
  * hash, one Jedis connection per partition.
  *
  * NOTE(review): offsets are assumed to be pre-seeded in MySQL — there is no
  * initialization path for a brand-new topic/partition (see the author's PS).
  */
object kafka_offset_tomysql {
  def main(args: Array[String]) {
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("kafka-spark-demo")
    // Streaming context with a 5-second micro-batch interval.
    val scc = new StreamingContext(sparkConf, Duration(5000))
    val kafkaParam = Map(
      "metadata.broker.list" -> "localhost:9092", // Kafka broker list
      // Fallback only: takes effect when no explicit offset is available.
      // We always pass fromOffsets below, so this should rarely apply.
      "auto.offset.reset" -> "smallest"
    )

    // Load the last committed offsets from MySQL.
    // NOTE(review): column name `partision` looks like a typo for `partition`,
    // but it must match the actual schema of table `test1` — if renaming,
    // change the table and both queries together.
    Class.forName("com.mysql.jdbc.Driver")
    ConnectionPool.singleton("jdbc:mysql://localhost:3306/offset_test", "root", "huangxiao")
    val fromOffsets = DB.readOnly { implicit session =>
      sql"select topic,partision,offset from test1".
        map { resultSet =>
          TopicAndPartition(resultSet.string(1), resultSet.int(2)) -> resultSet.long(3)
        }.list.apply().toMap
    }

    // Standard message handler: keep (topic, payload) for each record.
    val messageHandler = (mam: MessageAndMetadata[String, String]) => (mam.topic, mam.message())

    // Direct stream starting from the offsets loaded above. This approach
    // does not register a consumer group under /consumers in ZooKeeper.
    val stream: InputDStream[(String, String)] = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder, (String, String)](scc, kafkaParam, fromOffsets, messageHandler)

    stream.foreachRDD { rdd =>
      // Offset ranges processed by this batch, e.g.
      // OffsetRange(topic: 'kafka_test4', partition: 0, range: [1 -> 4])
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

      // Persist the new offsets back to MySQL in a single transaction.
      // The `offset = fromOffset` predicate makes each update an optimistic
      // compare-and-set: it only advances from the expected previous value.
      DB.localTx { implicit session =>
        offsetRanges.foreach { offsetRange =>
          sql"""update test1 set `offset` = ${offsetRange.untilOffset}
                where `topic` = ${offsetRange.topic} and `partision` = ${offsetRange.partition}
                and `offset` = ${offsetRange.fromOffset}
             """.update.apply()
        }
      }

      // Aggregate results into Redis: one connection per partition.
      rdd.foreachPartition { partitionOfRecords =>
        val jr = new Jedis("localhost", 6379)
        try {
          partitionOfRecords.foreach(record => jr.hincrBy("hash_test", record._2, 1))
        } finally {
          jr.close() // FIX: connection was previously leaked on every batch
        }
      }
    }

    scc.start()            // actually start the streaming job
    scc.awaitTermination() // block until the job is stopped
  }
}
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.huangxiao</groupId>
<artifactId>streaming</artifactId>
<version>1.0-SNAPSHOT</version>
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming_2.11</artifactId>
<version>2.3.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.spark/spark-streaming-kafka -->
<!-- NOTE(review): spark-streaming-kafka 1.6.3 targets Spark 1.6, but
     spark-streaming 2.3.0 is declared above. For Spark 2.3.0 with the
     Kafka 0.8 direct API used in the code, the matching artifact is
     spark-streaming-kafka-0-8_2.11, version 2.3.0 - verify before use. -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-streaming-kafka_2.11</artifactId>
<version>1.6.3</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.alibaba/fastjson -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.47</version>
</dependency>
<!-- hbase-server; netty-all is excluded (presumably to avoid a Netty
     version conflict with Spark's bundled Netty) -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
<version>1.4.4</version>
<exclusions>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/org.scalikejdbc/scalikejdbc -->
<dependency>
<groupId>org.scalikejdbc</groupId>
<artifactId>scalikejdbc_2.11</artifactId>
<version>2.2.1</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/redis.clients/jedis -->
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
<version>2.9.0</version>
</dependency>
</dependencies>
</project>