【Kafka】使用Java连接Kafka并上传/读取数据

本文展示了如何使用Java实现Kafka的Producer和Consumer。Producer通过设置配置连接到Kafka服务器,发送消息到主题kafkademo01。Consumer部分创建了多个线程模拟多消费者订阅同一主题kafkademo01,读取并打印接收到的消息。
摘要由CSDN通过智能技术生成

使用Java连接Kafka并上传/读取数据

Producer

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

public class MyProducer {
	/**
	 * Demo producer: sends 200 messages ("hello world 0" .. "hello world 199")
	 * to topic "kafkademo01" on the broker at 192.168.134.150:9092,
	 * throttled to roughly one message every 100 ms.
	 */
	public static void main(String[] args) {
		Properties prop = new Properties();
		prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.134.150:9092");
		prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
		prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
		// acks=-1 ("all"): wait for the full in-sync replica set to acknowledge
		// each record before considering the send successful.
		prop.put(ProducerConfig.ACKS_CONFIG, "-1");
		// try-with-resources flushes and closes the producer on exit; the
		// original never closed it, which can drop buffered, unsent records.
		// Key type is String to match the configured StringSerializer
		// (the original's Object key type was inconsistent with it).
		try (KafkaProducer<String, String> producer = new KafkaProducer<>(prop)) {
			for (int i = 0; i < 200; i++) {
				ProducerRecord<String, String> record =
						new ProducerRecord<>("kafkademo01", "hello world " + i);
				producer.send(record);
				try {
					Thread.sleep(100); // throttle the demo to ~10 msg/s
				} catch (InterruptedException e) {
					// Restore the interrupt flag and stop sending instead of
					// swallowing the interruption with printStackTrace().
					Thread.currentThread().interrupt();
					break;
				}
			}
		}
		System.out.println("Game Over!");
	}
}

Consumer

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class MyConsumer {
    /**
     * Demo consumer: starts 4 threads, each with its OWN KafkaConsumer
     * instance (KafkaConsumer is not thread-safe), all joined to the same
     * consumer group "G2" and subscribed to topic "kafkademo01", so the
     * topic's partitions are divided among them. Each thread prints every
     * record it receives and commits its offsets synchronously.
     *
     * With a group of a single consumer (e.g. group "G1"), one instance
     * would receive all partitions of the topic by itself.
     */
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.134.150:9092");
        prop.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        prop.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        prop.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000");
        // Auto-commit is disabled, so offsets MUST be committed manually
        // (see commitSync below); the original never committed, which means
        // every restart re-read all messages from the reset position.
        prop.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        prop.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
        // earliest / latest / none ("none" throws when no committed offset exists)
        prop.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        prop.put(ConsumerConfig.GROUP_ID_CONFIG, "G2");

        for (int i = 0; i < 4; i++) {
            new Thread(() -> {
                // One consumer instance per thread — never share one.
                KafkaConsumer<String, String> consumer = new KafkaConsumer<>(prop);
                try {
                    consumer.subscribe(Collections.singleton("kafkademo01"));
                    while (!Thread.currentThread().isInterrupted()) {
                        // poll(Duration) replaces the deprecated poll(long).
                        ConsumerRecords<String, String> records =
                                consumer.poll(Duration.ofMillis(100));
                        for (ConsumerRecord<String, String> record : records) {
                            System.out.println(Thread.currentThread().getName()
                                    + "\t" + record.offset()
                                    + "\t" + record.key()
                                    + "\t" + record.value());
                        }
                        // Manual commit, required because auto-commit is off.
                        consumer.commitSync();
                    }
                } finally {
                    // Leave the group cleanly and release network resources;
                    // the original leaked the consumer.
                    consumer.close();
                }
            }).start();
        }
    }
}
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值