kafka安装

首先启动zookeeper(kafka依赖zookeeper提供集群协调服务,必须先于kafka启动)

wget https://archive.apache.org/dist/kafka/2.3.0/kafka_2.11-2.3.0.tgz

tar -xzvf kafka_2.11-2.3.0.tgz

#执行脚本

./kafka-server-start.sh ../config/server.properties

#查看进程

jps

创建topic进行测试

./kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic test

查看topic列表

./kafka-topics.sh --list --zookeeper localhost:2181

输出:test

生产者消息测试

./kafka-console-producer.sh --broker-list localhost:9092 --topic test

消费者消息测试

#执行脚本(使用kafka-console-consumer.sh 接收消息并在终端打印)

./kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test --from-beginning

java代码实现生产者消费者

(1)maven项目添加kafka依赖

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.3.0</version>
</dependency>

(2)java代码实现

package com.server.kafka;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.Random;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
 

 /**
  * Demo that runs a Kafka producer and consumer against the same topic on two
  * threads, so messages sent by the producer show up on the consumer's console.
  *
  * <p>NOTE(review): class name looks like a typo for "KafkaExecutor"; kept
  * unchanged so existing references still compile.
  */
 public class KafakaExecutor {

     /** Topic shared by the producer and the consumer. */
     public static String topic = "test";

     public static void main(String[] args) {
         new Thread(() -> new Producer().execute()).start();
         new Thread(() -> new Consumer().execute()).start();
     }

     /** Polls {@link #topic} forever and prints every record it receives. */
     public static class Consumer {

         private void execute() {
             Properties p = new Properties();
             p.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.21.181:9092");
             p.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
             p.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
             // NOTE(review): reuses the topic name as the consumer group id —
             // works, but confirm this is the intended group naming.
             p.put(ConsumerConfig.GROUP_ID_CONFIG, topic);

             // try-with-resources so the consumer (and its network connections)
             // is released if the poll loop ever throws.
             try (KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(p)) {
                 // Subscribe to the topic.
                 kafkaConsumer.subscribe(Collections.singletonList(topic));

                 while (true) {
                     // poll(Duration) replaces poll(long), deprecated since
                     // kafka-clients 2.0 (KIP-266).
                     ConsumerRecords<String, String> records =
                             kafkaConsumer.poll(Duration.ofMillis(100));
                     for (ConsumerRecord<String, String> record : records) {
                         System.out.println(String.format("topic:%s,offset:%d,消息:%s",
                                 record.topic(), record.offset(), record.value()));
                     }
                 }
             }
         }
     }

     /** Sends a random greeting to {@link #topic} every 500 ms until interrupted. */
     public static class Producer {

         private void execute() {
             Properties p = new Properties();
             // Kafka broker addresses; comma-separated when there is more than one.
             p.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.21.181:9092");
             p.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
             p.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
             KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(p);

             // Hoisted out of the loop — the original created a new Random per message.
             Random random = new Random();
             try {
                 while (true) {
                     String msg = "Hello," + random.nextInt(100);
                     kafkaProducer.send(new ProducerRecord<>(topic, msg));
                     System.out.println("消息发送成功:" + msg);
                     Thread.sleep(500);
                 }
             } catch (InterruptedException e) {
                 // Restore the interrupt flag instead of swallowing it, so the
                 // thread terminates cooperatively.
                 Thread.currentThread().interrupt();
             } finally {
                 // Flush buffered records and release producer resources.
                 kafkaProducer.close();
             }
         }
     }
 }

测试结果(上面使用脚本命令执行消费者的终端也会同步输出消息数据)

  • 4
    点赞
  • 2
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值