kafka(一)

生产者程序1:

import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
object ProducerDemo {
  /** Sends one keyless record to the "consu" topic, then shuts the producer down. */
  def main(args: Array[String]): Unit = {
    val properties = new Properties()
    // Kafka broker bootstrap addresses
    properties.setProperty("bootstrap.servers", "linux01:9092,linux02:9092,linux03:9092")
    // Key/value serializers — both lines name the same class; the string literal
    // and classOf forms are equivalent
    properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    properties.setProperty("value.serializer", classOf[StringSerializer].getName)
    // FIX: the key type parameter must match the configured StringSerializer.
    // It was [Int, String], which only worked because the record carries no key;
    // supplying an Int key would have thrown ClassCastException at send time.
    val producer = new KafkaProducer[String, String](properties)
    val topic = "consu"
    val record = new ProducerRecord[String, String](topic, "超超 我是你babi")
    producer.send(record)
    // close() flushes buffered records before releasing resources
    producer.close()
  }
}

消费者端:

[root@linux01 /]# /opt/apps/kafka_2.12-2.6.2/bin/kafka-console-consumer.sh --topic consu --bootstrap-server linux01:9092,linux02:9092,linux03:9092 --from-beginning  
heloo

消费者:
import java.time.Duration
import java.util.Properties
import java.util

import org.apache.kafka.clients.consumer.{ConsumerRecords, KafkaConsumer}
import org.apache.kafka.common.serialization.StringDeserializer
//消费者一经启动就会一直消费,有就处理没有就等着

object ConsumerDemo {
  /** Polls the "consu" topic forever, printing every record it receives.
    * Once started, a consumer keeps consuming: it processes records when
    * they arrive and otherwise waits on the next poll.
    */
  def main(args: Array[String]): Unit = {
    val props = new Properties()
    props.setProperty("bootstrap.servers", "linux01:9092,linux02:9092,linux03:9092")
    // Deserializers for record keys and values (string literal and classOf
    // forms are interchangeable)
    props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.setProperty("value.deserializer", classOf[StringDeserializer].getName)
    // Consumer group this consumer belongs to
    props.setProperty("group.id", "maimaimai03")
    props.setProperty("auto.offset.reset", "earliest")

    val consumer = new KafkaConsumer[String, String](props)
    consumer.subscribe(util.Arrays.asList("consu"))

    import scala.collection.JavaConverters._
    while (true) {
      // poll() blocks up to the timeout waiting for data; a returned batch
      // may hold anything from zero to many records
      val batch: ConsumerRecords[String, String] = consumer.poll(Duration.ofSeconds(5))
      // Body runs only when at least one record arrived in this batch
      batch.asScala.foreach(rec => println(rec.toString))
    }
  }
}

生产者程序2:

import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
object ProducerDemo01 {
  /** Sends 1000 identical keyless records to the "consu" topic. */
  def main(args: Array[String]): Unit = {
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", "linux01:9092,linux02:9092,linux03:9092")
    properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    properties.setProperty("value.serializer", classOf[StringSerializer].getName)
    // FIX: key type parameter must match the configured StringSerializer
    // (was [Int, String]; harmless only because every record here is keyless,
    // but an Int key would have failed with ClassCastException).
    val producer = new KafkaProducer[String, String](properties)
    val topic = "consu"
    for (i <- 1 to 1000) {
      // With no partition number specified, the producer distributes keyless
      // records across the topic's partitions (batched, not strictly one-by-one).
      val record = new ProducerRecord[String, String](topic, "超超 我是你zuzong")
      producer.send(record)
    }
    producer.close()
  }

}

生产者程序3:

import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer

object ProducerDemo02 {

  /** Writes records numbered 3000..4000 (1001 total), all with key "wnnn",
    * to partition 0 of topic "consu", printing a marker after each send.
    */
  def main(args: Array[String]): Unit = {
    val conf = new Properties()
    conf.setProperty("bootstrap.servers", "linux01:9092,linux02:9092,linux03:9092")
    conf.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    conf.setProperty("value.serializer", classOf[StringSerializer].getName)

    val producer = new KafkaProducer[String, String](conf)
    val topic = "consu"

    // With an explicit partition number the producer writes every record
    // to that single partition instead of distributing them.
    (3000 to 4000).foreach { i =>
      val rec = new ProducerRecord[String, String](topic, 0, "wnnn", "超超 我是你nn" + i)
      producer.send(rec)
      println("---")
    }

    producer.close()
  }

}

生产者程序4:

import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer

object ProducerDemo5 {

  /** Sends records 3001..4000 to topic "wordcount", spreading them across
    * partitions by computing the partition number from the loop counter.
    */
  def main(args: Array[String]): Unit = {

    val properties = new Properties()
    // Kafka broker bootstrap addresses
    properties.setProperty("bootstrap.servers", "linux01:9092,linux02:9092,linux03:9092")
    // Key serializer (string-literal form)
    properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    // Value serializer (classOf form) — both spellings are equivalent
    properties.setProperty("value.serializer", classOf[StringSerializer].getName)

    // Kafka client
    val producer = new KafkaProducer[String, String](properties)

    val topic = "wordcount"

    for (i <- 3001 to 4000) {
      // Partition derived from the counter for a roughly even spread.
      // NOTE(review): this assumes the topic has (at least) 6 partitions —
      // sends will fail for partition numbers the topic does not have; verify.
      val partition = i % 6
      val record = new ProducerRecord[String, String](topic, partition, "doit", "hello doit " + i)
      producer.send(record)
    }

    // FIX: typo in the log message — "发生" (occurred) corrected to "发送" (sent)
    println("已经发送数据")
    //producer.flush()  // not needed: close() flushes pending records

    producer.close()

  }

}
  • 0
    点赞
  • 2
    收藏
    觉得还不错? 一键收藏
  • 2
    评论
评论 2
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值