package hive

import java.io.File

import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

/**
  * Spark consumes data from multiple Kafka topics and writes each topic to a different Hive table.
  */
object SparkToHive {
def main(args: Array[String]): Unit = {
Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.WARN)
Logger.getLogger("org.apache.kafka.clients.consumer").setLevel(Level.WARN)
// Use the HDFS warehouse path directly; wrapping it in java.io.File.getAbsolutePath
// would prepend the local working directory and mangle the URI
val warehouseLocation = "hdfs://cluster/hive/warehouse"
@transient val spark = SparkSession