kafka
DearNingning
这个作者很懒,什么都没留下…
展开
-
spark-streaming
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Seconds, StreamingContext}
import o… [snippet truncated] 原创 2021-06-27 22:54:46 · 130 阅读 · 1 评论 -
SPARK-STREAMING
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}
//只算当前批次
object wordandcount { def main(args: Array[String]): Unit = { val conf: … [snippet truncated] 原创 2021-06-24 23:36:55 · 131 阅读 · 2 评论 -
kafka(一)
生产者程序:
import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
object ProducerDemo { def main(args: Array[String]): Unit = { val properties = new… [snippet truncated] 原创 2021-06-22 21:05:02 · 101 阅读 · 2 评论
分享