/*
 * Functionality:
 *  1. Read the consumer group's committed offsets for a given topic from Kafka.
 *  2. Create the Kafka input stream starting from those offsets.
 *  3. Save each partition's offsets to Redis.
 */
import Kafka010.Utils.{MyKafkaUtils, RedisUtilsDemo}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}
object Test{
def main(args: Array[String]): Unit = {
//创建spark环境
val conf = new SparkConf()
.setMaster("local[*]")
.setAppName(s"${this.getClass.getCanonicalName}")
//创建sparkStreamingContext
val ssc = new StreamingContext(conf, Seconds(2))
//创建kafka相关参数
val groupId = "SparkKafka010"
val topics = List("datacollection")
//获取kafka参数,这里是自定义的MyKafkaUtils类,后面给出类的具体内容
val kafkaParams