kafka
DearNingning
这个作者很懒,什么都没留下…
展开
-
spark-streaming
import org.apache.kafka.clients.consumer.ConsumerRecord import org.apache.kafka.common.serialization.StringDeserializer import org.apache.spark.SparkConf import org.apache.spark.rdd.RDD import org.apache.spark.streaming.{Seconds, StreamingContext} import o…[预览截断] — 原创 2021-06-27 22:54:46 · 116 阅读 · 1 评论 -
SPARK-STREAMING
import org.apache.spark.SparkConf import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream} import org.apache.spark.streaming.{Seconds, StreamingContext} //只算当前批次 object wordandcount { def main(args: Array[String]): Unit = { val conf:…[预览截断] — 原创 2021-06-24 23:36:55 · 111 阅读 · 2 评论 -
kafka(一)
生产者程序: import java.util.Properties import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord} import org.apache.kafka.common.serialization.StringSerializer object ProducerDemo { def main(args: Array[String]): Unit = { val properties = new…[预览截断] — 原创 2021-06-22 21:05:02 · 93 阅读 · 2 评论