spark读取oracle写入kafka,sparkStreaming读取kafka写入hive表

package hive

import java.io.File
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, ConsumerStrategies, HasOffsetRanges, KafkaUtils, LocationStrategies}

/**
 * Consumes several Kafka topics with Spark Streaming and routes the records
 * toward different Hive tables. The actual Hive writes are elided in this
 * example (the filtered RDDs are only printed); see the companion article
 * referenced by the original post for the write path.
 */
object SparkToHive {

  def main(args: Array[String]): Unit = {
    // Quiet down noisy frameworks so application logs stay readable.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.WARN)
    Logger.getLogger("org.apache.kafka.clients.consumer").setLevel(Level.WARN)

    // FIX: the original wrapped this URI in java.io.File and called
    // getAbsolutePath, which resolves an HDFS URI against the local working
    // directory and yields a broken path. Use the URI string directly.
    val warehouseLocation = "hdfs://cluster/hive/warehouse"

    val spark = SparkSession
      .builder()
      .appName("Spark SQL To Hive")
      .config("spark.sql.warehouse.dir", warehouseLocation)
      // FIX: spark.serializer must be set before the SparkContext is
      // created; the original's spark.conf.set(...) after getOrCreate()
      // has no effect on serialization.
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .enableHiveSupport()
      .getOrCreate()

    val sc = spark.sparkContext
    // 1-second micro-batches.
    val ssc = new StreamingContext(sc, Seconds(1))

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "10.200.10.24:6667,10.200.10.26:6667,10.200.10.29:6667",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "test_jason",
      "auto.offset.reset" -> "latest", // latest | earliest
      // FIX: auto-commit may acknowledge offsets before a batch has been
      // processed, losing data on failure; commit manually instead (below).
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val topics = Array("test", "test1", "test2")

    // FIX: the original declared `var stream = null` and then reassigned it;
    // a single val binding is sufficient and avoids the mutable null.
    val stream: InputDStream[ConsumerRecord[String, String]] =
      KafkaUtils.createDirectStream[String, String](
        ssc,
        LocationStrategies.PreferConsistent,
        ConsumerStrategies.Subscribe[String, String](topics, kafkaParams)
      )

    stream.foreachRDD { rdd =>
      if (!rdd.isEmpty()) {
        // Capture offset ranges before any transformation so they can be
        // committed once the batch completes.
        val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

        // The payload is reused by several filters, so cache it once.
        val values = rdd.map(_.value()).cache()
        try {
          val tableA = values.filter(_.contains("hello")) // records for table a
          val tableB = values.filter(_.contains("jason")) // records for table b
          // Placeholder sinks: the original post prints the records and
          // defers the actual Hive writes to another article.
          tableA.foreach(println)
          tableB.foreach(println)
        } finally {
          // FIX: the original never released the cached RDD.
          values.unpersist()
        }

        // Commit offsets only after the batch has been processed.
        stream.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值