package day14
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{ Seconds, StreamingContext}
/**
 * Kafka word count using the receiver-based `KafkaUtils.createStream` API.
 * NOTE(review): this is the legacy receiver approach (ZooKeeper-based offsets);
 * the direct (no-receiver) API is preferred in newer Spark versions.
 */
object KafkaWC {
def main(args: Array[String]): Unit = {
val conf = new SparkConf().setAppName("kafkaWC").setMaster("local[2]")
val ssc = new StreamingContext(conf,Seconds(5))
//Next, set up the Kafka configuration
//First, the ZooKeeper quorum that Kafka depends on
val zks = "192.168.14.131:2181,192.168.14.131:2182,192.168.14.131:2183"
//Then the Kafka consumer group id
val groupId = "gp1"
//Topics to consume: map key is the topic name, value is the number of receiver threads
val topics = Map[String,Int]("tt"->1)
//Create the Kafka input DStream to pull data from Kafka
val data = KafkaUtils.createStream(ssc,zks,groupId,topics)
//The received records are key/value pairs: (key, value)
//