import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}
import scala.util.Try
object StreamingMaxwellKafka {
def main(args: Array[String]): Unit = {
val brokers = ConfigUtil.getConfig(Constants.KAFKA_BOOTSTRAP_SERVERS)
val topics = Array(Constants.VECHE)
val conf = new SparkConf().setMaster("local[4]").setAppName("sparkMaxwell")
val group_id:String = "vech_group"
val kafkaParams = Map[String, Object](
"bootstrap.servers" -> brokers,
"key.deserializer" -> classOf[StringDeserializer],
"value.deserializer&
Spark Streaming connecting to Kafka and integrating with HBase
Latest recommended article published 2021-04-25 11:36:33