Producing and Consuming Kafka Messages in Java

This article shows how to implement a producer and a consumer with Apache Kafka. The producer code creates a KafkaProducer instance, sets its configuration, and sends string messages. The consumer code creates a KafkaConsumer instance, subscribes to a topic, and consumes messages from the Kafka cluster. In this example the consumer commits offsets automatically and starts consuming from the earliest message.

Maven dependency

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.3.0</version>
</dependency>
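
Before running the examples, the target topic has to exist (unless the broker is configured to auto-create topics). Below is a minimal sketch of creating it programmatically with the AdminClient that ships in kafka-clients; the topic name kafka-send matches the examples in this article, while the broker address, partition count, and replication factor are assumptions for a local single-broker setup.

package com.kafka.demo;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.NewTopic;

import java.util.Collections;
import java.util.Properties;

public class KafkaTopicSetup {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");

        try (AdminClient admin = AdminClient.create(props)) {
            // 1 partition, replication factor 1 -- assumed values for a local single-broker setup
            NewTopic topic = new NewTopic("kafka-send", 1, (short) 1);
            admin.createTopics(Collections.singleton(topic)).all().get();
            System.out.println("topic created: kafka-send");
        }
    }
}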

Producer code

package com.kafka.demo;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;

public class KafkaProducerTest implements Runnable {

    private final KafkaProducer<String, String> producer;
    private final String topic;


    public KafkaProducerTest(String topicName) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "xxx.xxx.xx.xx:9092");
        props.put("key.serializer", StringSerializer.class);
        props.put("value.serializer", StringSerializer.class);
        this.producer = new KafkaProducer<>(props);
        this.topic = topicName;
    }

    @Override
    public void run() {
        int messageNo = 1;
        try {
            for (; ; ) {
                String messageStr = "hello,world";
                System.out.println("-----------------------"+messageStr);
                producer.send(new ProducerRecord<>(topic, messageStr));
                // print a summary and stop once an even-numbered message has been sent (here: after the 2nd message)
                if (messageNo % 2 == 0) {
                    System.out.println("--------------------------------------------: " + messageNo + " messages");
                    System.out.println("message sent: " + messageStr);
                    break;
                }
                // stop after producing 1,000,000 messages
                if (messageNo / 1000000 == 1) {
                    System.out.println("successfully sent " + messageNo + " messages");
                    break;
                }
                // sleep a random 1-10 seconds between sends
                int num = (int) (Math.random() * 10) + 1;
                System.out.println(num);
                Thread.sleep(num * 1000);
                messageNo++;
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            producer.close();
        }

    }

    public static void main(String[] args) {
        KafkaProducerTest test = new KafkaProducerTest("kafka-send");
        Thread thread = new Thread(test);
        thread.start();
    }
}
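
producer.send() is asynchronous and only returns a Future, so the loop above never checks whether a record actually reached the broker. If you want delivery confirmation, the same send() call can take a callback. The snippet below is a minimal sketch of that variant of the send line; the callback and its log messages are illustrative and not part of the original example.

producer.send(new ProducerRecord<>(topic, messageStr), (metadata, exception) -> {
    if (exception != null) {
        // the broker rejected the record or the request timed out
        exception.printStackTrace();
    } else {
        // metadata carries the partition and offset the record was written to
        System.out.println("sent to partition " + metadata.partition() + ", offset " + metadata.offset());
    }
});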

Consumer code

package com.kafka.demo;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;


public class KafkaConsumerTest implements Runnable {

    private final KafkaConsumer<String, String> consumer;
    private ConsumerRecords<String, String> msgList;
    private final String topic;
    private static final String GROUPID = "c_test"; // consumer group id; any name will do

    public KafkaConsumerTest(String topicName) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("security.protocol", "PLAINTEXT");
        props.put("group.id", GROUPID);
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        props.put("auto.offset.reset", "earliest");//从何处开始消费,latest 表示消费最新消息,earliest 表示从头开始消费,none表示抛出异常,默认latest
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());
        this.consumer = new KafkaConsumer<String, String>(props);
        this.topic = topicName;
        this.consumer.subscribe(Arrays.asList(topic));
    }

    @Override
    public void run() {
        int messageNo = 1;
        System.out.println("---------开始消费---------");
        try {
            for (; ; ) {
                msgList = consumer.poll(Duration.ofMillis(1000));
                if (!msgList.isEmpty()) {
                    for (ConsumerRecord<String, String> record : msgList) {
                        String value = record.value();
                        System.out.println(messageNo + "-----------------------------------------------------" + value);
                        messageNo++;
                    }
                    // offsets are committed automatically because enable.auto.commit is true
                } else {
                    Thread.sleep(100);
                }
            }
        } catch (InterruptedException e) {
            e.printStackTrace();
        } finally {
            consumer.close();
        }
    }

    public static void main(String[] args) {
        KafkaConsumerTest test1 = new KafkaConsumerTest("kafka-send");
        Thread thread1 = new Thread(test1);
        thread1.start();
    }
}
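
If you prefer the consumer not to auto-commit, for example so that an offset is committed only after its record has actually been processed, the same loop can be switched to manual commits. A minimal sketch, assuming the configuration above except for enable.auto.commit:

// in the constructor, disable auto-commit
props.put("enable.auto.commit", "false");

// in the poll loop, commit after processing each batch
msgList = consumer.poll(Duration.ofMillis(1000));
for (ConsumerRecord<String, String> record : msgList) {
    System.out.println(record.offset() + ": " + record.value());
}
if (!msgList.isEmpty()) {
    consumer.commitSync(); // commit the offsets of the records just processed
}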